var/home/core/zuul-output/0000755000175000017500000000000015126451227014532 5ustar corecorevar/home/core/zuul-output/logs/0000755000175000017500000000000015126463536015504 5ustar corecorevar/home/core/zuul-output/logs/kubelet.log0000644000000000000000004677262415126463527017732 0ustar rootrootJan 04 11:48:20 crc systemd[1]: Starting Kubernetes Kubelet... Jan 04 11:48:20 crc restorecon[4752]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc 
restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc 
restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 
crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 
11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:20 crc restorecon[4752]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 
11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:20 crc 
restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 
11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:20 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:21 crc restorecon[4752]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:21 crc restorecon[4752]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
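
Everything above this point is a single restorecon sweep over /var/lib/kubelet, and every entry has the same shape: a path, the message "not reset as customized by admin", and the SELinux context the file would otherwise have been reset to. A context string such as system_u:object_r:container_file_t:s0:c682,c947 reads user:role:type:level, and the optional MCS category pair (here c682,c947) is what confines a container file to one pod. The sketch below parses that shape; it is an illustrative helper written only against the lines in this log, and the names Context and parse_context are invented for the example.

# Illustrative sketch, not part of this log or of any shipped tool:
# split an SELinux context string from the restorecon messages above
# into its user:role:type:level fields.
from typing import NamedTuple, Optional

class Context(NamedTuple):
    user: str          # e.g. "system_u"
    role: str          # e.g. "object_r"
    type: str          # e.g. "container_file_t"
    sensitivity: str   # e.g. "s0"
    categories: Optional[str]  # MCS pair such as "c682,c947", or None

def parse_context(raw: str) -> Context:
    user, role, typ, level = raw.split(":", 3)
    sens, _, cats = level.partition(":")
    return Context(user, role, typ, sens, cats or None)

print(parse_context("system_u:object_r:container_file_t:s0:c682,c947"))
print(parse_context("system_u:object_r:kubelet_exec_t:s0"))
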
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 04 11:48:21 crc kubenswrapper[4797]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
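
Each of the six Flag warnings above points at the same remedy: move the value off the command line and into the file handed to --config, which the kubelet reads as a KubeletConfiguration. The sketch below records that translation for the flags seen here. The config field names follow my reading of the kubelet.config.k8s.io/v1beta1 API and should be verified against the kubelet version actually running; the kubelet.log filename and the report helper are likewise assumptions of the example.

import re

# Sketch: for each deprecated flag reported above, the (assumed)
# KubeletConfiguration field that replaces it. Verify the field names
# against your kubelet version before relying on them.
CONFIG_EQUIVALENT = {
    "--container-runtime-endpoint": "containerRuntimeEndpoint",
    "--volume-plugin-dir": "volumePluginDir",
    "--register-with-taints": "registerWithTaints",
    "--system-reserved": "systemReserved",
    # the warning text itself says to use eviction settings instead:
    "--minimum-container-ttl-duration": "evictionHard / evictionSoft",
    # no config equivalent; the warning says CRI will supply the sandbox image:
    "--pod-infra-container-image": None,
}

DEPRECATED = re.compile(r"Flag (--[\w-]+) has been deprecated")

def report(lines):
    seen = set()
    for line in lines:
        m = DEPRECATED.search(line)
        if m and m.group(1) not in seen:
            seen.add(m.group(1))
            print(m.group(1), "->", CONFIG_EQUIVALENT.get(m.group(1), "?"))

with open("kubelet.log", encoding="utf-8", errors="replace") as f:  # path assumed
    report(f)
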
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286927 4797 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286932 4797 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286936 4797 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286939 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286943 4797 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286947 4797 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286950 4797 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286954 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286958 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286961 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286965 4797 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286968 4797 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286972 4797 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286975 4797 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.286979 4797 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287005 4797 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287009 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287014 4797 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287018 4797 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287022 4797 feature_gate.go:330] unrecognized feature gate: Example
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287026 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287029 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287033 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287037 4797 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287040 4797 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287044 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287047 4797 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287051 4797 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287054 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287058 4797 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287061 4797 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287064 4797 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287069 4797 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287073 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287076 4797 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287080 4797 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287083 4797 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287088 4797 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287092 4797 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287096 4797 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287100 4797 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287104 4797 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287109 4797 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287113 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287116 4797 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287120 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287123 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287127 4797 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287131 4797 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287135 4797 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287140 4797 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287145 4797 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287149 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287153 4797 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287157 4797 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287161 4797 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287165 4797 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287169 4797 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287173 4797 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.287176 4797 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287385 4797 flags.go:64] FLAG: --address="0.0.0.0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287407 4797 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287415 4797 flags.go:64] FLAG: --anonymous-auth="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287421 4797 flags.go:64] FLAG: --application-metrics-count-limit="100"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287427 4797 flags.go:64] FLAG: --authentication-token-webhook="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287432 4797 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287438 4797 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287443 4797 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287447 4797 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287451 4797 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287456 4797 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287460 4797 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287465 4797 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287469 4797 flags.go:64] FLAG: --cgroup-root=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287473 4797 flags.go:64] FLAG: --cgroups-per-qos="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287477 4797 flags.go:64] FLAG: --client-ca-file=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287481 4797 flags.go:64] FLAG: --cloud-config=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287484 4797 flags.go:64] FLAG: --cloud-provider=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287488 4797 flags.go:64] FLAG: --cluster-dns="[]"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287493 4797 flags.go:64] FLAG: --cluster-domain=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287497 4797 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287501 4797 flags.go:64] FLAG: --config-dir=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287506 4797 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287510 4797 flags.go:64] FLAG: --container-log-max-files="5"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287516 4797 flags.go:64] FLAG: --container-log-max-size="10Mi"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287520 4797 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287524 4797 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287528 4797 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287532 4797 flags.go:64] FLAG: --contention-profiling="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287536 4797 flags.go:64] FLAG: --cpu-cfs-quota="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287540 4797 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287545 4797 flags.go:64] FLAG: --cpu-manager-policy="none"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287549 4797 flags.go:64] FLAG: --cpu-manager-policy-options=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287554 4797 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287559 4797 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287563 4797 flags.go:64] FLAG: --enable-debugging-handlers="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287567 4797 flags.go:64] FLAG: --enable-load-reader="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287571 4797 flags.go:64] FLAG: --enable-server="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287575 4797 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287582 4797 flags.go:64] FLAG: --event-burst="100"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287593 4797 flags.go:64] FLAG: --event-qps="50"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287597 4797 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287601 4797 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287605 4797 flags.go:64] FLAG: --eviction-hard=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287610 4797 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287615 4797 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287619 4797 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287623 4797 flags.go:64] FLAG: --eviction-soft=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287627 4797 flags.go:64] FLAG: --eviction-soft-grace-period=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287631 4797 flags.go:64] FLAG: --exit-on-lock-contention="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287635 4797 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287639 4797 flags.go:64] FLAG: --experimental-mounter-path=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287643 4797 flags.go:64] FLAG: --fail-cgroupv1="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287647 4797 flags.go:64] FLAG: --fail-swap-on="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287651 4797 flags.go:64] FLAG: --feature-gates=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287656 4797 flags.go:64] FLAG: --file-check-frequency="20s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287661 4797 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287665 4797 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287669 4797 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287673 4797 flags.go:64] FLAG: --healthz-port="10248"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287677 4797 flags.go:64] FLAG: --help="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287681 4797 flags.go:64] FLAG: --hostname-override=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287685 4797 flags.go:64] FLAG: --housekeeping-interval="10s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287689 4797 flags.go:64] FLAG: --http-check-frequency="20s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287693 4797 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287698 4797 flags.go:64] FLAG: --image-credential-provider-config=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287703 4797 flags.go:64] FLAG: --image-gc-high-threshold="85"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287708 4797 flags.go:64] FLAG: --image-gc-low-threshold="80"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287714 4797 flags.go:64] FLAG: --image-service-endpoint=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287719 4797 flags.go:64] FLAG: --kernel-memcg-notification="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287724 4797 flags.go:64] FLAG: --kube-api-burst="100"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287729 4797 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287733 4797 flags.go:64] FLAG: --kube-api-qps="50"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287737 4797 flags.go:64] FLAG: --kube-reserved=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287742 4797 flags.go:64] FLAG: --kube-reserved-cgroup=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287745 4797 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287751 4797 flags.go:64] FLAG: --kubelet-cgroups=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287756 4797 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287760 4797 flags.go:64] FLAG: --lock-file=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287764 4797 flags.go:64] FLAG: --log-cadvisor-usage="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287768 4797 flags.go:64] FLAG: --log-flush-frequency="5s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287772 4797 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287779 4797 flags.go:64] FLAG: --log-json-split-stream="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287783 4797 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287787 4797 flags.go:64] FLAG: --log-text-split-stream="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287791 4797 flags.go:64] FLAG: --logging-format="text"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287795 4797 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287800 4797 flags.go:64] FLAG: --make-iptables-util-chains="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287804 4797 flags.go:64] FLAG: --manifest-url=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287807 4797 flags.go:64] FLAG: --manifest-url-header=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287813 4797 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287817 4797 flags.go:64] FLAG: --max-open-files="1000000"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287822 4797 flags.go:64] FLAG: --max-pods="110"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287826 4797 flags.go:64] FLAG: --maximum-dead-containers="-1"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287830 4797 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287834 4797 flags.go:64] FLAG: --memory-manager-policy="None"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287838 4797 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287843 4797 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287847 4797 flags.go:64] FLAG: --node-ip="192.168.126.11"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287851 4797 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287863 4797 flags.go:64] FLAG: --node-status-max-images="50"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287867 4797 flags.go:64] FLAG: --node-status-update-frequency="10s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287872 4797 flags.go:64] FLAG: --oom-score-adj="-999"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287876 4797 flags.go:64] FLAG: --pod-cidr=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287880 4797 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287887 4797 flags.go:64] FLAG: --pod-manifest-path=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287892 4797 flags.go:64] FLAG: --pod-max-pids="-1"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287897 4797 flags.go:64] FLAG: --pods-per-core="0"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287901 4797 flags.go:64] FLAG: --port="10250"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287905 4797 flags.go:64] FLAG: --protect-kernel-defaults="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287909 4797 flags.go:64] FLAG: --provider-id=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287913 4797 flags.go:64] FLAG: --qos-reserved=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287918 4797 flags.go:64] FLAG: --read-only-port="10255"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287922 4797 flags.go:64] FLAG: --register-node="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287927 4797 flags.go:64] FLAG: --register-schedulable="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287931 4797 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287938 4797 flags.go:64] FLAG: --registry-burst="10"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287943 4797 flags.go:64] FLAG: --registry-qps="5"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287948 4797 flags.go:64] FLAG: --reserved-cpus=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287953 4797 flags.go:64] FLAG: --reserved-memory=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287958 4797 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287963 4797 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287968 4797 flags.go:64] FLAG: --rotate-certificates="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.287973 4797 flags.go:64] FLAG: --rotate-server-certificates="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288012 4797 flags.go:64] FLAG: --runonce="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288016 4797 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288020 4797 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288025 4797 flags.go:64] FLAG: --seccomp-default="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288029 4797 flags.go:64] FLAG: --serialize-image-pulls="true"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288033 4797 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288037 4797 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288041 4797 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288046 4797 flags.go:64] FLAG: --storage-driver-password="root"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288053 4797 flags.go:64] FLAG: --storage-driver-secure="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288057 4797 flags.go:64] FLAG: --storage-driver-table="stats"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288061 4797 flags.go:64] FLAG: --storage-driver-user="root"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288065 4797 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288069 4797 flags.go:64] FLAG: --sync-frequency="1m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288073 4797 flags.go:64] FLAG: --system-cgroups=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288077 4797 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288084 4797 flags.go:64] FLAG: --system-reserved-cgroup=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288087 4797 flags.go:64] FLAG: --tls-cert-file=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288091 4797 flags.go:64] FLAG: --tls-cipher-suites="[]"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288096 4797 flags.go:64] FLAG: --tls-min-version=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288100 4797 flags.go:64] FLAG: --tls-private-key-file=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288105 4797 flags.go:64] FLAG: --topology-manager-policy="none"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288109 4797 flags.go:64] FLAG: --topology-manager-policy-options=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288113 4797 flags.go:64] FLAG: --topology-manager-scope="container"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288117 4797 flags.go:64] FLAG: --v="2"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288123 4797 flags.go:64] FLAG: --version="false"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288128 4797 flags.go:64] FLAG: --vmodule=""
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288133 4797 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288137 4797 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288239 4797 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288243 4797 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288247 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288251 4797 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288256 4797 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
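[Editor's note] The flags.go:64 entries above record the kubelet's full effective command line at startup. A minimal sketch for turning that dump into a dict, e.g. to diff the flags between two boots; "kubelet.log" is a placeholder path for a saved copy of this journal output:

```python
import re

# Sketch: collect "flags.go:64] FLAG: --name=value" entries into a dict.
# findall() tolerates multiple entries sharing one physical line.
FLAG_RE = re.compile(r'flags\.go:64\] FLAG: (--[\w.-]+)="(.*?)"(?= |$)')

flags = {}
with open("kubelet.log") as f:
    for line in f:
        for name, value in FLAG_RE.findall(line):
            flags[name] = value

print(flags.get("--config"))           # /etc/kubernetes/kubelet.conf
print(flags.get("--system-reserved"))  # cpu=200m,ephemeral-storage=350Mi,memory=350Mi
```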
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288259 4797 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288263 4797 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288267 4797 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288271 4797 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288274 4797 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288277 4797 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288281 4797 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288286 4797 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288290 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288293 4797 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288297 4797 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288300 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288304 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288308 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288311 4797 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288314 4797 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288318 4797 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288321 4797 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288325 4797 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288328 4797 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288333 4797 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288337 4797 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288341 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288345 4797 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288349 4797 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288353 4797 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288356 4797 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288360 4797 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288364 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288367 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288371 4797 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288374 4797 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288377 4797 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288381 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288385 4797 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288388 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288392 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288395 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288399 4797 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288403 4797 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288407 4797 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288410 4797 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288414 4797 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288417 4797 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288421 4797 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288424 4797 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288428 4797 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288431 4797 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288435 4797 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288438 4797 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288441 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288445 4797 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288448 4797 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288452 4797 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288456 4797 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288460 4797 feature_gate.go:330] unrecognized feature gate: Example
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288463 4797 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288467 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288471 4797 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288476 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288480 4797 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288484 4797 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288488 4797 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288492 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288496 4797 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.288500 4797 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.288509 4797 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.298818 4797 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.298889 4797 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299088 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299115 4797 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299124 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299136 4797 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299146 4797 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299155 4797 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299164 4797 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299173 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299211 4797 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299221 4797 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299229 4797 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299237 4797 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299245 4797 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299253 4797 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299261 4797 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299270 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299278 4797 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299286 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299294 4797 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299302 4797 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299310 4797 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299318 4797 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299325 4797 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299336 4797 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299346 4797 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299355 4797 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299362 4797 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299370 4797 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299378 4797 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299386 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299394 4797 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299401 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299412 4797 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299425 4797 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299437 4797 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299448 4797 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
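[Editor's note] The feature_gate.go:386 summary above (repeated after each parsing pass) is the effective gate map for this kubelet; the surrounding "unrecognized feature gate" warnings appear to be OpenShift-level gate names that the kubelet's own gate registry does not know, so it skips them. A minimal sketch for extracting the map into a Python dict; "kubelet.log" is again a placeholder path:

```python
import re

# Sketch: parse "feature_gate.go:386] feature gates: {map[Name:bool ...]}"
# into {name: bool}. Relies on the Go map rendering shown in this log.
MAP_RE = re.compile(r"feature gates: \{map\[(.*?)\]\}")

gates = {}
with open("kubelet.log") as f:
    for line in f:
        m = MAP_RE.search(line)
        if m:  # last occurrence wins; all passes print the same map here
            gates = {k: v == "true"
                     for k, v in (kv.split(":") for kv in m.group(1).split())}

print(sum(gates.values()), "gates enabled of", len(gates))
print(gates["ValidatingAdmissionPolicy"])  # True in this boot
```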
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299457 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299466 4797 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299475 4797 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299485 4797 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299493 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299502 4797 feature_gate.go:330] unrecognized feature gate: Example
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299510 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299519 4797 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299527 4797 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299536 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299545 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299553 4797 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299561 4797 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299571 4797 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299580 4797 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299588 4797 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299596 4797 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299604 4797 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299612 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299620 4797 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299627 4797 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299635 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299642 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299650 4797 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299658 4797 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299666 4797 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299673 4797 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299681 4797 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299689 4797 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299697 4797 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299705 4797 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299713 4797 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299720 4797 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299728 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299737 4797 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.299750 4797 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.299980 4797 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300019 4797 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300029 4797 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300038 4797 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300047 4797 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300057 4797 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300068 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300077 4797 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300087 4797 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300097 4797 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300105 4797 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300113 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300120 4797 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300128 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300136 4797 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300144 4797 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300152 4797 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300160 4797 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300168 4797 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300177 4797 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300185 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300192 4797 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300200 4797 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300207 4797 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300215 4797 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300226 4797 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300235 4797 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300243 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300251 4797 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300259 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300268 4797 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300276 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300284 4797 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300292 4797 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300300 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300308 4797 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300316 4797 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300323 4797 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300331 4797 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300339 4797 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300347 4797 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300358 4797 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300370 4797 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300380 4797 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300390 4797 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300399 4797 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300408 4797 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300417 4797 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300425 4797 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300433 4797 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300441 4797 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300450 4797 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300457 4797 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300465 4797 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300473 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300480 4797 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300488 4797 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300496 4797 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300504 4797 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300511 4797 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300519 4797 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300527 4797 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300535 4797 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300543 4797 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300550 4797 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300558 4797 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300566 4797 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300573 4797 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300581 4797 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300589 4797 feature_gate.go:330] unrecognized feature gate: Example
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.300598 4797 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.300610 4797 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.301217 4797 server.go:940] "Client rotation is on, will bootstrap in background"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.305544 4797 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.305679 4797 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.306609 4797 server.go:997] "Starting client certificate rotation"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.306655 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.306883 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-25 06:05:48.205196349 +0000 UTC
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.307034 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.315026 4797 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.316483 4797 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.320773 4797 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.330324 4797 log.go:25] "Validated CRI v1 runtime API"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.354466 4797 log.go:25] "Validated CRI v1 image API"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.356638 4797 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.359050 4797 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-04-11-43-29-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.359092 4797 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4
blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.384429 4797 manager.go:217] Machine: {Timestamp:2026-01-04 11:48:21.382420315 +0000 UTC m=+0.239607104 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:b11c24b9-47b3-405e-94d5-79769a53822b BootID:e6c2e325-522a-4f6c-bbaa-70b27798188f Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:da:f4:29 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:da:f4:29 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c2:72:76 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:c9:b4:a2 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:53:09:26 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:4d:d8:90 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:59:dd:8d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:c6:d9:36:fe:e7:74 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:c2:ef:22:16:2e:ef Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: 
DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.384811 4797 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
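Note on the certificate_manager.go:356 lines above: the kubelet picks a rotation deadline well before the certificate's expiry (jittered, roughly 70-90% of the way through the cert's lifetime in the client-go certificate manager) and begins requesting a new CSR once that deadline passes, which is why rotation starts here in 2026-01 against a 2026-02 expiry. A self-contained Go sketch that reads the same cert/key bundle and reports time to expiry; the file path comes from the certificate_store.go line above, everything else is illustrative:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // The file holds the certificate and private key concatenated as PEM blocks.
        data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
        if err != nil {
            fmt.Println("read failed:", err)
            return
        }
        rest := data
        for {
            var block *pem.Block
            block, rest = pem.Decode(rest)
            if block == nil {
                break
            }
            if block.Type != "CERTIFICATE" {
                continue // skip the private key block
            }
            cert, err := x509.ParseCertificate(block.Bytes)
            if err != nil {
                fmt.Println("parse failed:", err)
                return
            }
            fmt.Println("expires:", cert.NotAfter)
            fmt.Println("remaining:", time.Until(cert.NotAfter).Round(time.Minute))
        }
    }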
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.385062 4797 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.385730 4797 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386128 4797 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386177 4797 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386582 4797 topology_manager.go:138] "Creating topology manager with none policy"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386601 4797 container_manager_linux.go:303] "Creating device plugin manager"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386914 4797 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.386963 4797 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.387318 4797 state_mem.go:36] "Initialized new in-memory state store"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.387446 4797 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.388608 4797 kubelet.go:418] "Attempting to sync node with API server"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.388645 4797 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
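The nodeConfig dump above carries the inputs for node allocatable: SystemReserved (cpu 200m, memory 350Mi, ephemeral-storage 350Mi) and a hard eviction threshold of 100Mi for memory.available, with KubeReserved null. Allocatable memory is capacity minus those reservations minus the hard eviction threshold; a worked sketch in Go using the MemoryCapacity value from the machine info above (the arithmetic is the standard node-allocatable formula, the constants are the logged values):

    package main

    import "fmt"

    func main() {
        const mi = 1 << 20
        capacity := int64(33654120448)    // MemoryCapacity from the machine info
        systemReserved := int64(350 * mi) // SystemReserved memory ("350Mi"); KubeReserved is null here
        evictionHard := int64(100 * mi)   // HardEvictionThreshold for memory.available ("100Mi")
        allocatable := capacity - systemReserved - evictionHard
        fmt.Printf("allocatable memory: %d bytes (~%d Mi)\n", allocatable, allocatable/mi)
    }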
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.388683 4797 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.388705 4797 kubelet.go:324] "Adding apiserver pod source"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.388725 4797 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.391520 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.391687 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.391605 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.391811 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.392316 4797 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.393236 4797 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
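All of the reflector failures above reduce to the same condition: nothing is accepting connections on api-int.crc.testing:6443 yet, because the kubelet comes up before the static-pod API server it is about to launch. A quick probe in the same spirit, with the endpoint taken from the log and everything else illustrative:

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // "connection refused" here means the port is reachable but nothing
        // is listening -- exactly what the reflector errors report.
        conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", 3*time.Second)
        if err != nil {
            fmt.Println("probe failed:", err)
            return
        }
        defer conn.Close()
        fmt.Println("API endpoint reachable")
    }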
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.396329 4797 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397264 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397310 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397326 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397340 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397362 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397376 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397390 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397411 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397428 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397464 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397483 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.397498 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.398427 4797 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.399276 4797 server.go:1280] "Started kubelet"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.399700 4797 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.399814 4797 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.400219 4797 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.400560 4797 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Jan 04 11:48:21 crc systemd[1]: Started Kubernetes Kubelet.
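The plugins.go:603 lines above are the in-tree volume plugins being registered by name before the kubelet starts serving. Schematically it is a name-keyed registry; a toy Go sketch of that pattern, where the types and method names are illustrative stand-ins, not the kubelet's actual volume plugin interfaces:

    package main

    import "fmt"

    // volumePlugin and registry are illustrative stand-ins, not kubelet types.
    type volumePlugin struct{ name string }

    type registry struct{ plugins map[string]volumePlugin }

    func (r *registry) register(p volumePlugin) {
        r.plugins[p.name] = p
        fmt.Printf("Loaded volume plugin %q\n", p.name)
    }

    func main() {
        r := &registry{plugins: map[string]volumePlugin{}}
        for _, name := range []string{
            "kubernetes.io/empty-dir",
            "kubernetes.io/host-path",
            "kubernetes.io/csi",
        } {
            r.register(volumePlugin{name: name})
        }
    }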
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402152 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402245 4797 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402422 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 14:18:12.905671179 +0000 UTC
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402635 4797 volume_manager.go:287] "The desired_state_of_world populator starts"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402651 4797 volume_manager.go:289] "Starting Kubelet Volume Manager"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.402844 4797 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.403411 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.403491 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.404484 4797 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188784b742b4b98a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:48:21.399230858 +0000 UTC m=+0.256417627,LastTimestamp:2026-01-04 11:48:21.399230858 +0000 UTC m=+0.256417627,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.404772 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="200ms"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.402972 4797 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.408087 4797 server.go:460] "Adding debug handlers to kubelet server"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.408153 4797 factory.go:55] Registering systemd factory
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.408181 4797 factory.go:221] Registration of the systemd container factory successfully
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.408884 4797 factory.go:153] Registering CRI-O factory
Jan 04 11:48:21 crc kubenswrapper[4797]:
I0104 11:48:21.409474 4797 factory.go:221] Registration of the crio container factory successfully Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.410505 4797 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.410564 4797 factory.go:103] Registering Raw factory Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.410669 4797 manager.go:1196] Started watching for new ooms in manager Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.412720 4797 manager.go:319] Starting recovery of all containers Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416192 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416378 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416427 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416469 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416509 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416665 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416696 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416734 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416768 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416805 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416831 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.416906 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417070 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417168 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417626 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417701 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417730 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417792 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417816 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417846 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417867 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.417890 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418012 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418033 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418060 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418322 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418513 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418655 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418790 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.418817 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.419077 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.421826 4797 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.421889 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.421915 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.421936 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.421957 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422031 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422052 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422071 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422089 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422107 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422126 4797 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422176 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422198 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422219 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422237 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422255 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422277 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422295 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422314 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422332 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422351 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422370 4797 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422397 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422419 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422441 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422463 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422482 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422502 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422521 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422539 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422557 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422577 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422598 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422617 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422635 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422662 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422682 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422703 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422721 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422740 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422758 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422775 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422794 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422812 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422830 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422848 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422866 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422885 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422903 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422920 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422938 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422956 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.422975 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423025 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423044 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423062 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423081 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423101 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423119 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423136 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423153 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423172 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423203 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423223 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423298 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423317 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423338 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423358 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423377 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423395 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423423 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423442 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423462 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423483 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423526 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423547 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423567 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423587 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423606 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.423628 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425079 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425137 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425168 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425199 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425229 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425257 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425284 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425310 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425333 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425362 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425391 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425417 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425441 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425469 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425498 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425522 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425549 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425575 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425601 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425630 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425659 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425683 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425707 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425731 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425755 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425795 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425822 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425846 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425868 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425895 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425917 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425941 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.425962 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426019 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426045 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426067 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426466 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426493 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426517 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426539 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426564 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426592 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426615 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426639 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426664 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426692 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426726 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426751 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426774 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426798 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426823 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426847 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426871 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426896 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426920 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426943 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.426970 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427031 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427057 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427083 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427106 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427131 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427157 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427183 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427205 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427226 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427246 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427274 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427307 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427344 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427363 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427382 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427408 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427438 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427457 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427486 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427506 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427526 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427551 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427577 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427602 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427623 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427641 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427669 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427689 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427708 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427727 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427745 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427771 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427798 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427817 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427843 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427870 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427898 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427924 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427949 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.427974 4797 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.428023 4797 reconstruct.go:97] "Volume reconstruction finished" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.428054 4797 reconciler.go:26] "Reconciler: start to sync state" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.447523 4797 manager.go:324] Recovery completed Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.465017 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.467341 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.467415 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.467439 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.469283 4797 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.469326 4797 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.469343 4797 state_mem.go:36] "Initialized new in-memory state store" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.470534 4797 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.472695 4797 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.472760 4797 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.472799 4797 kubelet.go:2335] "Starting kubelet main sync loop" Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.472876 4797 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.473789 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.473874 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.482910 4797 policy_none.go:49] "None policy: Start" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.484233 4797 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.484327 4797 state_mem.go:35] "Initializing new in-memory state store" Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.506359 4797 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.540544 4797 manager.go:334] "Starting Device Plugin manager" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.540633 4797 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.540653 4797 server.go:79] "Starting device plugin registration server" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.541342 4797 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.541371 4797 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.541711 4797 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.541814 4797 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.541824 4797 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.553265 4797 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.573633 4797 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 04 11:48:21 crc kubenswrapper[4797]: 
I0104 11:48:21.573724 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.574855 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.574908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.574919 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.575078 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.575728 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.575913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576045 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576164 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576017 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576636 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.576674 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577109 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577157 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577367 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577511 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.577568 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580026 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580086 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580087 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580121 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580172 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580193 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.580476 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.581018 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.581261 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584096 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584131 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584143 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584348 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584370 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584384 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584571 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.584604 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.585331 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.585561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.586272 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.586291 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.586439 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.586468 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.587171 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.587234 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.587259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.606703 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="400ms" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631281 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631334 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631369 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631457 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631521 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631578 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631620 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631649 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631679 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631718 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631756 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631790 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631817 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631845 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.631872 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.642533 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.643841 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.643890 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.643908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.643938 4797 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.644502 4797 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.22:6443: connect: connection refused" node="crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733235 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733345 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733442 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733548 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733570 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733579 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733688 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733664 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733831 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733895 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733838 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733871 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733946 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.733977 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734036 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734065 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734093 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734119 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734146 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734174 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734202 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734351 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734463 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734523 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734570 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734582 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734628 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734659 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734711 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.734751 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.845298 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.846710 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.846754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.846770 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.846800 4797 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: E0104 11:48:21.847198 4797 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.22:6443: connect: connection refused" node="crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.918186 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.923394 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.929906 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.948867 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-8bd590393a9dc86a8e2939a1c64161343778b43087d5d09ea9c3a111ac87b4e0 WatchSource:0}: Error finding container 8bd590393a9dc86a8e2939a1c64161343778b43087d5d09ea9c3a111ac87b4e0: Status 404 returned error can't find the container with id 8bd590393a9dc86a8e2939a1c64161343778b43087d5d09ea9c3a111ac87b4e0
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.952259 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-2350ad553680ba59a72b3621fcbc300c6eda590c3c16ec48c0fab296e905efae WatchSource:0}: Error finding container 2350ad553680ba59a72b3621fcbc300c6eda590c3c16ec48c0fab296e905efae: Status 404 returned error can't find the container with id 2350ad553680ba59a72b3621fcbc300c6eda590c3c16ec48c0fab296e905efae
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.952531 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-77d45cf296a510bdc2a7278454cfc2e99c8510d15e6b254937f8a035c41411ca WatchSource:0}: Error finding container 77d45cf296a510bdc2a7278454cfc2e99c8510d15e6b254937f8a035c41411ca: Status 404 returned error can't find the container with id 77d45cf296a510bdc2a7278454cfc2e99c8510d15e6b254937f8a035c41411ca
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.961855 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: I0104 11:48:21.971063 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:21 crc kubenswrapper[4797]: W0104 11:48:21.997560 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-93167a4a8a4a5556dd6ee6fe410148ed5945876095fc6617e8eb3fd8055e0701 WatchSource:0}: Error finding container 93167a4a8a4a5556dd6ee6fe410148ed5945876095fc6617e8eb3fd8055e0701: Status 404 returned error can't find the container with id 93167a4a8a4a5556dd6ee6fe410148ed5945876095fc6617e8eb3fd8055e0701
Jan 04 11:48:22 crc kubenswrapper[4797]: W0104 11:48:21.999969 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-657da3fa31b1781bd44c951f783084ac70243f02a391f55d7cdd728b3048790e WatchSource:0}: Error finding container 657da3fa31b1781bd44c951f783084ac70243f02a391f55d7cdd728b3048790e: Status 404 returned error can't find the container with id 657da3fa31b1781bd44c951f783084ac70243f02a391f55d7cdd728b3048790e
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.007675 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="800ms"
Jan 04 11:48:22 crc kubenswrapper[4797]: W0104 11:48:22.240614 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.240713 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.248002 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.249323 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.249364 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.249373 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.249396 4797 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.249774 4797 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.22:6443: connect: connection refused" node="crc"
Jan 04 11:48:22 crc kubenswrapper[4797]: W0104 11:48:22.301537 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.301618 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.400652 4797 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.402813 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 11:24:12.467008603 +0000 UTC
Jan 04 11:48:22 crc kubenswrapper[4797]: W0104 11:48:22.477114 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.477238 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.479440 4797 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf" exitCode=0
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.479483 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.479641 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"657da3fa31b1781bd44c951f783084ac70243f02a391f55d7cdd728b3048790e"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.479757 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.480715 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07" exitCode=0
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.480740 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.480765 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"93167a4a8a4a5556dd6ee6fe410148ed5945876095fc6617e8eb3fd8055e0701"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.480830 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481229 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481275 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481293 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481437 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481453 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.481462 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.483007 4797 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="6d028001ecc3a35ef3a28af9a5d2a92ccad372f03d31b16c87ee6f45377fb223" exitCode=0
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.483045 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"6d028001ecc3a35ef3a28af9a5d2a92ccad372f03d31b16c87ee6f45377fb223"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.483087 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"77d45cf296a510bdc2a7278454cfc2e99c8510d15e6b254937f8a035c41411ca"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.483173 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.483232 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484147 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484165 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484572 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484609 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.484626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.487054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.487086 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2350ad553680ba59a72b3621fcbc300c6eda590c3c16ec48c0fab296e905efae"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.488483 4797 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f65bbfb59abeecc3f46094d24df00ad7caa6c25df6675e83e6a047ebba5a516f" exitCode=0
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.488519 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f65bbfb59abeecc3f46094d24df00ad7caa6c25df6675e83e6a047ebba5a516f"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.488537 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8bd590393a9dc86a8e2939a1c64161343778b43087d5d09ea9c3a111ac87b4e0"}
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.488626 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.489316 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.489349 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:22 crc kubenswrapper[4797]: I0104 11:48:22.489359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[4797]: W0104 11:48:22.635881 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.22:6443: connect: connection refused
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.636022 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.22:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:22 crc kubenswrapper[4797]: E0104 11:48:22.809150 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="1.6s"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.051051 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.052160 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.052195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.052203 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.052224 4797 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:23 crc kubenswrapper[4797]: E0104 11:48:23.052626 4797 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.22:6443: connect: connection refused" node="crc"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.403509 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 02:54:37.556014999 +0000 UTC
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.494495 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.494553 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.494570 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.494667 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.495441 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.495467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.495477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.503582 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.503632 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.503654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.503673 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.506655 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"62e4ba103f65b5553aadfc5704a47c082736d3c8c105d3af379e628146172be8"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.506773 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.508077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.508123 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.508141 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.512747 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.512803 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.512830 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.512927 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.514059 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.514109 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.514130 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.515710 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.517889 4797 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="38f225bf7e2f15bc2495ef2eb0cb779ecf50501c951377e16cb3fdb8f0aedd97" exitCode=0
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.517950 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"38f225bf7e2f15bc2495ef2eb0cb779ecf50501c951377e16cb3fdb8f0aedd97"}
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.518143 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.519306 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.519356 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:23 crc kubenswrapper[4797]: I0104 11:48:23.519379 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.404419 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 05:00:14.164064494 +0000 UTC
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.527772 4797 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6bafc949d374558426f1caa5bba3cd6ec0f23c2d580d67fa181962e7ebad3bc9" exitCode=0
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.527889 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6bafc949d374558426f1caa5bba3cd6ec0f23c2d580d67fa181962e7ebad3bc9"}
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.528240 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.529922 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.530052 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.530083 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.534334 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546"}
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.534385 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.534397 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.535982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.536102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.536126 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.536175 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.536210 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.536228 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.595362 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.653605 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.655439 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.655509 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.655532 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:24 crc kubenswrapper[4797]: I0104 11:48:24.655582 4797 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.241400 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.405566 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 09:07:37.117542941 +0000 UTC
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.405722 4797 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 69h19m11.711825338s for next certificate rotation
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.541817 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1be4c138ac501052c791096381afde23e1406f6d449dfcdb9f3397a3d9c6841e"}
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.542396 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a33476cb580cb97950e6972773e76ca8452f3e6422d1c78a02e28760942a4fa3"}
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.542437 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8a595630f827ad04cafe6b494c6e466f4bdc5d0cabea4aaf5f2db1de9a6be0b1"}
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.541950 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.541897 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.542628 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.543936 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.544037 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.544062 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.544533 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.544573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:25 crc kubenswrapper[4797]: I0104 11:48:25.544587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.559562 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"38bd19d0d42c858b7416b0541d12a2342b666fed7a6c59b876162585f6cc0690"}
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.559632 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.559654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"07f523069a67baff1915c0e420344de8cd225d955cf2494b3668287db2019ce0"}
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.559660 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561676 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561678 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:26 crc kubenswrapper[4797]: I0104 11:48:26.561701 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.123198 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.123441 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.123503 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.125787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.125878 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.125900 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.563126 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.564572 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.564636 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.564661 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.595591 4797 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.595714 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:48:27 crc kubenswrapper[4797]: I0104 11:48:27.980639 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.376800 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.376964 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.377058 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.378418 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.378469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.378487 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.566251 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.567548 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.567593 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:28 crc kubenswrapper[4797]: I0104 11:48:28.567611 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:29 crc kubenswrapper[4797]: I0104 11:48:29.123798 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:29 crc kubenswrapper[4797]: I0104 11:48:29.124055 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:29 crc kubenswrapper[4797]: I0104 11:48:29.125504 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:29 crc kubenswrapper[4797]: I0104 11:48:29.125564 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:29 crc kubenswrapper[4797]: I0104 11:48:29.125584 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[4797]: I0104 11:48:30.573098 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:30 crc kubenswrapper[4797]: I0104 11:48:30.573348 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:30 crc kubenswrapper[4797]: I0104 11:48:30.574670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[4797]: I0104 11:48:30.574725 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[4797]: I0104 11:48:30.574742 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[4797]: E0104 11:48:31.554080 4797 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.724307 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.724493 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.725852 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.725913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.725943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.769218 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:31 crc kubenswrapper[4797]: I0104 11:48:31.776444 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:32 crc kubenswrapper[4797]: I0104 11:48:32.577229 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:32 crc kubenswrapper[4797]: I0104 11:48:32.578620 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:32 crc kubenswrapper[4797]: I0104 11:48:32.578656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:32 crc kubenswrapper[4797]: I0104 11:48:32.578666 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:32 crc kubenswrapper[4797]: I0104 11:48:32.583891 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:33 crc kubenswrapper[4797]: I0104 11:48:33.401254 4797 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Jan 04 11:48:33 crc kubenswrapper[4797]: E0104 11:48:33.518497 4797 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 04 11:48:33 crc kubenswrapper[4797]: I0104 11:48:33.579372 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:33 crc kubenswrapper[4797]: I0104 11:48:33.580766 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:33 crc kubenswrapper[4797]: I0104 11:48:33.580859 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:33 crc kubenswrapper[4797]: I0104 11:48:33.580878 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:34 crc kubenswrapper[4797]: W0104 11:48:34.133286 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.133412 4797 trace.go:236] Trace[222278807]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:24.131) (total time: 10001ms):
Jan 04 11:48:34 crc kubenswrapper[4797]: Trace[222278807]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:48:34.133)
Jan 04 11:48:34 crc kubenswrapper[4797]: Trace[222278807]: [10.00187675s] [10.00187675s] END
Jan 04 11:48:34 crc kubenswrapper[4797]: E0104 11:48:34.133444 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 04 11:48:34 crc kubenswrapper[4797]: W0104 11:48:34.330835 4797 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.330965 4797 trace.go:236] Trace[1485088791]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:24.329) (total time: 10001ms):
Jan 04 11:48:34 crc kubenswrapper[4797]: Trace[1485088791]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:48:34.330)
Jan 04 11:48:34 crc kubenswrapper[4797]: Trace[1485088791]: [10.00116501s] [10.00116501s] END
Jan 04 11:48:34 crc kubenswrapper[4797]: E0104 11:48:34.331031 4797 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 04 11:48:34 crc kubenswrapper[4797]: E0104 11:48:34.411180 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.544560 4797 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.544666 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.549500 4797 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.549572 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.582412 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.587335 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.587380 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:34 crc kubenswrapper[4797]: I0104 11:48:34.587398 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.221080 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.221296 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.222420 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.222458 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.222471 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.267781 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.584979 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.586371 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.586407 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.586423 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:35 crc kubenswrapper[4797]: I0104 11:48:35.613290 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:36 crc kubenswrapper[4797]: I0104 11:48:36.586580 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:36 crc kubenswrapper[4797]: I0104 11:48:36.587604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:36 crc kubenswrapper[4797]: I0104 11:48:36.587644 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:36 crc kubenswrapper[4797]: I0104 11:48:36.587670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:37 crc kubenswrapper[4797]: I0104 11:48:37.596366 4797 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:48:37 crc kubenswrapper[4797]: I0104 11:48:37.596417 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:48:37 crc kubenswrapper[4797]: I0104 11:48:37.795449 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:37 crc kubenswrapper[4797]: I0104 11:48:37.810838 4797 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 04 11:48:37 crc kubenswrapper[4797]: I0104 11:48:37.911978 4797 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.383127 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.389716 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.400022 4797 apiserver.go:52] "Watching apiserver"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.404424 4797 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.404920 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.405496 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.405590 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.405541 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.405660 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:38 crc kubenswrapper[4797]: E0104 11:48:38.405740 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:38 crc kubenswrapper[4797]: E0104 11:48:38.406102 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.406339 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:38 crc kubenswrapper[4797]: E0104 11:48:38.406441 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.406349 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.408139 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.408419 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.408628 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.408946 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.408974 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.409070 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.409329 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.409704 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.410734 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.410965 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.450625 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.470233 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.490758 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.503808 4797 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.508320 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.524501 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.541581 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.555917 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:38 crc kubenswrapper[4797]: I0104 11:48:38.569565 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 04 11:48:38 crc kubenswrapper[4797]: E0104 11:48:38.602237 4797 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.474287 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.474488 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.540418 4797 trace.go:236] Trace[988261972]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:24.989) (total time: 14551ms):
Jan 04 11:48:39 crc kubenswrapper[4797]: Trace[988261972]: ---"Objects listed" error: 14551ms (11:48:39.540)
Jan 04 11:48:39 crc kubenswrapper[4797]: Trace[988261972]: [14.551246953s] [14.551246953s] END
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.540467 4797 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.541087 4797 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.541400 4797 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.545279 4797 trace.go:236] Trace[1134917699]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:24.625) (total time: 14920ms):
Jan 04 11:48:39 crc kubenswrapper[4797]: Trace[1134917699]: ---"Objects listed" error: 14919ms (11:48:39.544)
Jan 04 11:48:39 crc kubenswrapper[4797]: Trace[1134917699]: [14.920053937s] [14.920053937s] END
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.545323 4797 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.594492 4797 
prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.598645 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.612282 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.630013 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642185 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642233 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642265 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642287 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642311 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642359 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642379 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642612 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642667 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642776 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.642853 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643094 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643501 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643520 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643592 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643657 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643902 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643938 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.643687 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644013 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644060 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644087 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644619 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644298 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644562 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644765 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644791 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.644812 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645156 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645315 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645364 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645404 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645479 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645763 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645794 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645874 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645911 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645951 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645956 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.645969 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646016 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646032 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646047 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646066 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646081 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646096 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646110 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.646125 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647260 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647407 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647728 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647054 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647080 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647265 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647283 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647699 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647779 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.647831 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.648126 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.648314 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.649093 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.649523 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651168 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.648333 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651343 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651371 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651461 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651554 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651673 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651768 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.651918 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652054 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652198 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652349 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652579 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652688 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.652854 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653078 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653173 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653230 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653285 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653409 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653637 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653777 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653842 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653877 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653964 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654132 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654176 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654211 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654539 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654582 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654614 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654655 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654688 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654789 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654820 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654966 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.653959 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.654563 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.655058 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.655120 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.655238 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.655676 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.656141 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.656202 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.656012 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.657935 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.656625 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.663609 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.664084 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.665840 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.665978 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666266 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666311 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666500 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666626 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666980 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.666980 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.667057 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.667133 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.667323 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.667329 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.667885 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668007 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668046 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668072 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668095 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668118 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668141 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668149 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668162 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668221 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668217 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668236 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668223 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668302 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668320 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668325 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668336 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668387 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668413 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668444 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668462 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668487 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668526 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668546 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668569 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668586 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668582 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668592 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668628 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668655 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668696 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668717 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668756 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668791 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668821 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668853 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668883 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668915 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668947 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.668977 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669031 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669063 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669100 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669135 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669166 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669194 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669225 4797 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669255 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669288 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669319 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669352 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669385 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669418 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669436 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669452 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669488 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669519 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669556 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669591 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669621 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669630 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669657 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669692 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669724 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669756 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669827 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669856 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669886 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669918 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669953 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.669974 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod 
"43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670196 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670238 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670271 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670286 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670304 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670435 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670454 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670472 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670505 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670538 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670571 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670603 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670637 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670671 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670701 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670735 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670770 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670805 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670865 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670896 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670929 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670944 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.670968 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671032 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671070 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671130 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671168 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671202 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671237 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671274 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671309 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671341 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") 
pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671375 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671382 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671407 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671446 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671478 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671510 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671544 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671578 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671610 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671645 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671677 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671711 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671745 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671777 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671803 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671811 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671847 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671881 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671916 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.671953 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672014 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672057 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672093 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672125 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672161 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672166 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672196 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672231 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672266 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672302 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672335 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672370 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672404 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672441 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672472 4797 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672507 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672542 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672576 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672612 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672646 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672679 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672717 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672751 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672785 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672819 4797 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672855 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672889 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672953 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673021 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673060 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673102 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673170 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673207 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc 
kubenswrapper[4797]: I0104 11:48:39.673253 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673291 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673336 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673384 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673418 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673451 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673489 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673526 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673690 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673715 4797 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673736 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673755 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673804 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673823 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673841 4797 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673876 4797 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673895 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673914 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673934 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673955 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673973 4797 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675624 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: 
\"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675655 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675675 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675710 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675732 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675751 4797 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675770 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675795 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675815 4797 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675835 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675854 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675876 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675894 4797 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675912 4797 reconciler_common.go:293] "Volume detached for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675933 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675951 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675971 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676024 4797 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676044 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676063 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676083 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676101 4797 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676120 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676138 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676162 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676182 4797 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676201 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676220 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676240 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676259 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676278 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676298 4797 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676319 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676340 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676359 4797 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676377 4797 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676397 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676417 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676436 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676458 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676478 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676499 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676517 4797 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676537 4797 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676555 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676574 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676594 4797 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676614 4797 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676634 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676654 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676672 4797 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676692 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676711 4797 reconciler_common.go:293] "Volume 
detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676729 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676748 4797 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676766 4797 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676787 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676811 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676829 4797 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676849 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676865 4797 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676883 4797 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676902 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681326 4797 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672336 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672497 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672650 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.672677 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.682023 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.673928 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674312 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674385 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674384 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674526 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674642 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.674723 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675041 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675206 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675356 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675778 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.675800 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676066 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676121 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676199 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676298 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676352 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676426 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676477 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676611 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676754 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676780 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.676933 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677103 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677142 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677375 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677454 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677583 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677622 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677659 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.677976 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.678012 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.678263 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.678348 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.678566 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.679507 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.679607 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.679860 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680175 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680256 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.680320 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680592 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680440 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680685 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680701 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680792 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680810 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681073 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681167 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681372 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681539 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.681874 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.680006 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.682597 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.682766 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.683972 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.684042 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.684093 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.685190 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:40.184847249 +0000 UTC m=+19.042033988 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.685398 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.685498 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:40.185486625 +0000 UTC m=+19.042673334 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.685566 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.685637 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686104 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686113 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686442 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686485 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686763 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686821 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.687941 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686906 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.688249 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.688629 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.688919 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.689126 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.689467 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.689781 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.685182 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690127 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690139 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.689825 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690382 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690588 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690971 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.690976 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691034 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691249 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691579 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691638 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691885 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.691977 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.692162 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:40.192096658 +0000 UTC m=+19.049283457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.692394 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.692445 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.692860 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.693117 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.686962 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.693960 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.694011 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.694063 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.694319 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.694401 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.694666 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.695541 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.696227 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.696460 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.696568 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.696734 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.697049 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.697437 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.697599 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.697774 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.699722 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.699775 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.699795 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.699880 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:40.19985403 +0000 UTC m=+19.057040809 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.704707 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.704741 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.704753 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:39 crc kubenswrapper[4797]: E0104 11:48:39.704838 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:40.204811709 +0000 UTC m=+19.061998498 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.705372 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.705436 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.705581 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.705693 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.706093 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.708250 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.709613 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.710017 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.710107 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.712125 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.713393 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.713654 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.718700 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.719716 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.724960 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.725963 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.732234 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777464 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777509 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777584 4797 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777602 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777616 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777630 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777641 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777652 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777663 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777674 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777685 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777696 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777705 4797 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777716 4797 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777598 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777733 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777728 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777830 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777847 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777858 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777868 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777890 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777899 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777908 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777917 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777927 4797 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777942 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777950 4797 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777959 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777968 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777976 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.777996 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778005 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778014 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778022 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778031 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778039 4797 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778048 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778056 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778065 4797 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778075 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778083 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778092 4797 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778102 4797 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778112 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778122 4797 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778137 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778149 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778161 4797 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778171 4797 reconciler_common.go:293] "Volume detached for 
volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778181 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778189 4797 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778198 4797 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778206 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778214 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778223 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778231 4797 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778240 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778248 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778256 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778264 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778272 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778280 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778288 4797 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778296 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778305 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778313 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778320 4797 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778328 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778338 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778345 4797 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778353 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778362 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778372 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778381 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778391 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: 
\"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778399 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778406 4797 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778415 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778423 4797 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778431 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778439 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778447 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778455 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778464 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778473 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778481 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778491 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778500 4797 reconciler_common.go:293] "Volume detached for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778509 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778518 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778526 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778534 4797 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778542 4797 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778553 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778563 4797 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778572 4797 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778580 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778587 4797 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778597 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778605 4797 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778613 4797 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778620 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778628 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778636 4797 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778644 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778652 4797 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778660 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778667 4797 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778675 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778684 4797 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778692 4797 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778700 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778708 4797 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778715 4797 reconciler_common.go:293] "Volume detached for volume 
\"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778723 4797 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778731 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778739 4797 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778747 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778755 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778763 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.778772 4797 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.929793 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:39 crc kubenswrapper[4797]: W0104 11:48:39.942202 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-74509c196d6d785e1e43f542fcafbe8dbed6c667be77c838be9e2ca5dd8b2854 WatchSource:0}: Error finding container 74509c196d6d785e1e43f542fcafbe8dbed6c667be77c838be9e2ca5dd8b2854: Status 404 returned error can't find the container with id 74509c196d6d785e1e43f542fcafbe8dbed6c667be77c838be9e2ca5dd8b2854 Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.944764 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:39 crc kubenswrapper[4797]: I0104 11:48:39.956380 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:39 crc kubenswrapper[4797]: W0104 11:48:39.968361 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-f04ac433b553efc17e4f1ca95e33021ad1135d00b90b4219628c1de3eea0683d WatchSource:0}: Error finding container f04ac433b553efc17e4f1ca95e33021ad1135d00b90b4219628c1de3eea0683d: Status 404 returned error can't find the container with id f04ac433b553efc17e4f1ca95e33021ad1135d00b90b4219628c1de3eea0683d Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.283591 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.283860 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:41.283814649 +0000 UTC m=+20.141001388 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.283952 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.284017 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.284042 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.284066 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284184 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284200 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284237 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284269 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:41.28423068 +0000 UTC m=+20.141417399 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284282 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284297 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284293 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284335 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284356 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:41.284281671 +0000 UTC m=+20.141468420 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284359 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284385 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:41.284373454 +0000 UTC m=+20.141560193 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.284479 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:41.284466256 +0000 UTC m=+20.141653005 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.420343 4797 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.473102 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.473216 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.473256 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:40 crc kubenswrapper[4797]: E0104 11:48:40.473438 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.599705 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f04ac433b553efc17e4f1ca95e33021ad1135d00b90b4219628c1de3eea0683d"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.601421 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.601443 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ec176f9bc67e8256df6289e7e3f5839e1645a0f651176525ba1e658d8bd93971"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.603525 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.603572 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.603599 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"74509c196d6d785e1e43f542fcafbe8dbed6c667be77c838be9e2ca5dd8b2854"} Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.624903 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.645464 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.672750 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.686838 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.707529 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.726936 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"n
ame\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.743964 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.764120 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regenera
tion-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.780450 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.798967 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.819882 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.833345 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.848518 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[4797]: I0104 11:48:40.860928 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.074826 4797 csr.go:261] certificate signing request csr-67422 is approved, waiting to be issued Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.121410 4797 csr.go:257] certificate signing request csr-67422 is issued Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.292645 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.292721 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.292752 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.292777 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:41 crc 
kubenswrapper[4797]: I0104 11:48:41.292807 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.292920 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.292974 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293031 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293064 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293009 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.292971345 +0000 UTC m=+22.150158074 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293075 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293096 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293172 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293205 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293121 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.293097258 +0000 UTC m=+22.150284047 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293316 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.293290833 +0000 UTC m=+22.150477542 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293330 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.293324864 +0000 UTC m=+22.150511573 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.293342 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.293336054 +0000 UTC m=+22.150522763 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.308064 4797 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.308489 4797 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-diagnostics/events/network-check-source-55646444c4-trplf.188784bb8581c9d6\": read tcp 38.102.83.22:52924->38.102.83.22:6443: use of closed network connection" event="&Event{ObjectMeta:{network-check-source-55646444c4-trplf.188784bb8581c9d6 openshift-network-diagnostics 26299 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-network-diagnostics,Name:network-check-source-55646444c4-trplf,UID:9d751cbb-f2e2-430d-9754-c882a5e924a5,APIVersion:v1,ResourceVersion:25300,FieldPath:,},Reason:FailedMount,Message:MountVolume.SetUp failed for volume \"kube-api-access-s2dwl\" : [object \"openshift-network-diagnostics\"/\"kube-root-ca.crt\" not registered, object \"openshift-network-diagnostics\"/\"openshift-service-ca.crt\" not registered],Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:48:39 +0000 UTC,LastTimestamp:2026-01-04 11:48:41.293108678 +0000 UTC m=+20.150295387,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 04 11:48:41 crc kubenswrapper[4797]: W0104 11:48:41.308810 4797 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.473288 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.473419 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.476974 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.477497 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.478222 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.478819 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.479408 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.479851 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.480451 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.481054 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.481793 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.482445 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.483039 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.483803 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.484363 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.484927 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.486191 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.487897 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.488537 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.489700 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.490212 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.490916 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.492280 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.492819 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.494007 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.494537 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.496030 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.496545 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.496861 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.497308 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.498179 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.498725 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.499322 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.499835 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.500299 4797 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.500398 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.501708 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.502238 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.502645 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.503893 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.504610 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.508093 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.508818 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.509875 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.510378 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.513128 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.513764 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.514936 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.515502 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.516734 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.517387 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.518707 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.519378 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.519750 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.520346 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.520917 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.522210 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.523029 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.524163 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.530768 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2dbq6"] Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.531135 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.532086 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-xwctk"] Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.532343 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: W0104 11:48:41.534969 4797 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Jan 04 11:48:41 crc kubenswrapper[4797]: E0104 11:48:41.535036 4797 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.535675 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-fl747"] Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.535931 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.539574 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.540189 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.540707 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.540918 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543269 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543326 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543421 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543496 4797 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-multus"/"multus-daemon-config" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543622 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543661 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543629 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543807 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.543816 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.559052 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.572107 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.583234 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.595942 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.613553 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.624716 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.635369 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.646906 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.657892 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.667902 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.685381 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.694863 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-hosts-file\") pod 
\"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.694905 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-netns\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.694937 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-hostroot\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695044 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-daemon-config\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695064 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-kubelet\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695077 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-multus-certs\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695092 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-k8s-cni-cncf-io\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695106 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-bin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695121 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-multus\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695176 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76c57\" (UniqueName: \"kubernetes.io/projected/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-kube-api-access-76c57\") pod 
\"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695231 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-cnibin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695252 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9f279bbd-812a-4617-b821-852c35954cb6-rootfs\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695273 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f279bbd-812a-4617-b821-852c35954cb6-proxy-tls\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695294 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-os-release\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695311 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-cni-binary-copy\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695331 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f279bbd-812a-4617-b821-852c35954cb6-mcd-auth-proxy-config\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695350 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-conf-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695380 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695399 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-socket-dir-parent\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695421 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gczkd\" (UniqueName: \"kubernetes.io/projected/91fac858-36ec-4a4b-ba0d-014f6b96b421-kube-api-access-gczkd\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695441 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkzss\" (UniqueName: \"kubernetes.io/projected/9f279bbd-812a-4617-b821-852c35954cb6-kube-api-access-mkzss\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695464 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-system-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.695485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-etc-kubernetes\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.701099 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.722601 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796375 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-k8s-cni-cncf-io\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796423 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-bin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 
11:48:41.796438 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-multus\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796454 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-multus-certs\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796481 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76c57\" (UniqueName: \"kubernetes.io/projected/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-kube-api-access-76c57\") pod \"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796497 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-cnibin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796513 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9f279bbd-812a-4617-b821-852c35954cb6-rootfs\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796527 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f279bbd-812a-4617-b821-852c35954cb6-proxy-tls\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796541 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-os-release\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796555 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-cni-binary-copy\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796569 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f279bbd-812a-4617-b821-852c35954cb6-mcd-auth-proxy-config\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796595 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796609 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-socket-dir-parent\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796624 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-conf-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796642 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-system-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796657 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-etc-kubernetes\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796681 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gczkd\" (UniqueName: \"kubernetes.io/projected/91fac858-36ec-4a4b-ba0d-014f6b96b421-kube-api-access-gczkd\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796701 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkzss\" (UniqueName: \"kubernetes.io/projected/9f279bbd-812a-4617-b821-852c35954cb6-kube-api-access-mkzss\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796696 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-multus\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796716 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-cni-bin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796719 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-hosts-file\") pod \"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " 
pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796771 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-hosts-file\") pod \"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796795 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-hostroot\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796817 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-multus-certs\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796871 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-netns\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796896 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-daemon-config\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796922 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-kubelet\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797014 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-var-lib-kubelet\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797047 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-hostroot\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797076 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-netns\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797204 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-conf-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.796557 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-host-run-k8s-cni-cncf-io\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797389 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797411 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f279bbd-812a-4617-b821-852c35954cb6-mcd-auth-proxy-config\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797431 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-socket-dir-parent\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797457 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-system-cni-dir\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797479 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-etc-kubernetes\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797792 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9f279bbd-812a-4617-b821-852c35954cb6-rootfs\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797872 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-multus-daemon-config\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.797912 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/91fac858-36ec-4a4b-ba0d-014f6b96b421-cni-binary-copy\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 
crc kubenswrapper[4797]: I0104 11:48:41.797934 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-os-release\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.798213 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/91fac858-36ec-4a4b-ba0d-014f6b96b421-cnibin\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.802166 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f279bbd-812a-4617-b821-852c35954cb6-proxy-tls\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.815871 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76c57\" (UniqueName: \"kubernetes.io/projected/3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f-kube-api-access-76c57\") pod \"node-resolver-fl747\" (UID: \"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\") " pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.815885 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkzss\" (UniqueName: \"kubernetes.io/projected/9f279bbd-812a-4617-b821-852c35954cb6-kube-api-access-mkzss\") pod \"machine-config-daemon-2dbq6\" (UID: \"9f279bbd-812a-4617-b821-852c35954cb6\") " pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.848399 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:48:41 crc kubenswrapper[4797]: W0104 11:48:41.860334 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f279bbd_812a_4617_b821_852c35954cb6.slice/crio-98e875fe546a09ff2b0de24dd10a2af6997852e7af8467cb964d280f391bcda1 WatchSource:0}: Error finding container 98e875fe546a09ff2b0de24dd10a2af6997852e7af8467cb964d280f391bcda1: Status 404 returned error can't find the container with id 98e875fe546a09ff2b0de24dd10a2af6997852e7af8467cb964d280f391bcda1 Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.861252 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-fl747" Jan 04 11:48:41 crc kubenswrapper[4797]: W0104 11:48:41.889059 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fd7fc2a_4958_4faf_b6fc_fb75c0553d7f.slice/crio-8b11a5bc9db3b50ee983a07af5af8d12a2c1ee7d260c6f686caf12059611fc36 WatchSource:0}: Error finding container 8b11a5bc9db3b50ee983a07af5af8d12a2c1ee7d260c6f686caf12059611fc36: Status 404 returned error can't find the container with id 8b11a5bc9db3b50ee983a07af5af8d12a2c1ee7d260c6f686caf12059611fc36 Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.916182 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-kcsbk"] Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.916737 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.918871 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.920425 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.937266 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.970166 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.986115 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998567 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-os-release\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998655 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998677 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998694 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvzql\" (UniqueName: \"kubernetes.io/projected/3287e642-dc09-4bbe-91c4-02904aa821de-kube-api-access-pvzql\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998723 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-cnibin\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998788 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-system-cni-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:41 crc kubenswrapper[4797]: I0104 11:48:41.998839 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-binary-copy\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.003169 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.033390 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.055709 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.071288 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.089310 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099468 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099501 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099518 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvzql\" (UniqueName: \"kubernetes.io/projected/3287e642-dc09-4bbe-91c4-02904aa821de-kube-api-access-pvzql\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099537 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-cnibin\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099557 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-system-cni-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099572 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-binary-copy\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099591 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-os-release\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099651 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-cnibin\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099697 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-system-cni-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099765 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-os-release\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.099789 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3287e642-dc09-4bbe-91c4-02904aa821de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.100353 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-binary-copy\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.100431 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3287e642-dc09-4bbe-91c4-02904aa821de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.103091 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.122140 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.122546 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-04 11:43:41 +0000 UTC, rotation deadline is 2026-10-21 10:14:24.739121452 +0000 UTC Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.122605 4797 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6958h25m42.616519721s for next certificate rotation Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.134399 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.317205 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-thvnv"] Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.319431 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.322139 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.322269 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.322330 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.322518 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.322551 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.323970 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.324827 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.334723 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.348060 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.392483 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402281 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402314 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402329 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402346 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402362 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402388 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402401 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402422 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402436 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402467 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402481 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402501 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402514 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402531 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62nns\" (UniqueName: \"kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402547 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402565 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402579 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402592 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402606 4797 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.402621 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.419026 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.431213 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.440376 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvzql\" (UniqueName: \"kubernetes.io/projected/3287e642-dc09-4bbe-91c4-02904aa821de-kube-api-access-pvzql\") pod \"multus-additional-cni-plugins-kcsbk\" (UID: \"3287e642-dc09-4bbe-91c4-02904aa821de\") " pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.442921 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.443177 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gczkd\" (UniqueName: \"kubernetes.io/projected/91fac858-36ec-4a4b-ba0d-014f6b96b421-kube-api-access-gczkd\") pod \"multus-xwctk\" (UID: \"91fac858-36ec-4a4b-ba0d-014f6b96b421\") " pod="openshift-multus/multus-xwctk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.456211 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xwctk" Jan 04 11:48:42 crc kubenswrapper[4797]: W0104 11:48:42.468753 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91fac858_36ec_4a4b_ba0d_014f6b96b421.slice/crio-788936c63147e90890099760e02cce3965f1edf2bc900ea11d0766ff308950e6 WatchSource:0}: Error finding container 788936c63147e90890099760e02cce3965f1edf2bc900ea11d0766ff308950e6: Status 404 returned error can't find the container with id 788936c63147e90890099760e02cce3965f1edf2bc900ea11d0766ff308950e6 Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.473207 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.473306 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.473215 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.473385 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.478657 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.495136 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.503945 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504008 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504028 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504050 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504071 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504070 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504089 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504114 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504131 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504137 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504153 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504177 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504190 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504211 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504249 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504263 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504261 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd\") pod 
\"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504204 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504263 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504356 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504397 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504417 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504427 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504435 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504457 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504483 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504522 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504538 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504558 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504576 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504583 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504596 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62nns\" (UniqueName: \"kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504587 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504613 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504642 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504688 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.504854 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.505284 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.508421 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.508517 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.523633 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.526446 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.536609 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62nns\" (UniqueName: \"kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns\") pod \"ovnkube-node-thvnv\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") " pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: W0104 11:48:42.539208 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3287e642_dc09_4bbe_91c4_02904aa821de.slice/crio-0643e28c468270e0529fcd291b0ed36802116b06f9dbdbf6b63696c3cf4f51f6 WatchSource:0}: Error finding container 0643e28c468270e0529fcd291b0ed36802116b06f9dbdbf6b63696c3cf4f51f6: Status 404 returned error can't find the container with id 0643e28c468270e0529fcd291b0ed36802116b06f9dbdbf6b63696c3cf4f51f6 Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.544922 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.559889 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.573221 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.610372 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.612249 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fl747" event={"ID":"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f","Type":"ContainerStarted","Data":"9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.612275 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fl747" event={"ID":"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f","Type":"ContainerStarted","Data":"8b11a5bc9db3b50ee983a07af5af8d12a2c1ee7d260c6f686caf12059611fc36"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.613712 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.613734 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.613743 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"98e875fe546a09ff2b0de24dd10a2af6997852e7af8467cb964d280f391bcda1"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.615403 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerStarted","Data":"788936c63147e90890099760e02cce3965f1edf2bc900ea11d0766ff308950e6"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 
11:48:42.617508 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerStarted","Data":"0643e28c468270e0529fcd291b0ed36802116b06f9dbdbf6b63696c3cf4f51f6"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.625599 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.632257 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.641982 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.655866 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.666157 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.676105 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.688505 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.698386 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.707724 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.719440 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.729324 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.739034 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.742093 4797 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.743543 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.743581 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.743603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.744225 4797 kubelet_node_status.go:76] "Attempting to register node" 
node="crc" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.755039 4797 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.755313 4797 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.756642 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.756708 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.756727 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.756753 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.756780 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.768175 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.774777 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.780787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.780820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.780828 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.780859 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.780867 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.784736 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.794972 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.798196 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.798223 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.798234 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.798250 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.798282 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.801367 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 
crc kubenswrapper[4797]: E0104 11:48:42.809054 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d
34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 
04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.811884 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.811931 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.811941 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.811952 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.811963 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.812388 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.822028 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.822936 4797 kubelet_node_status.go:585] "Error 
updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256
:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"si
zeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":46317936
5},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.826095 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.826132 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.826141 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.826172 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.826180 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.836249 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.843481 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: E0104 11:48:42.843611 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.845165 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.845219 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.845238 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.845264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.845284 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.853027 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.866511 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.879226 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.890894 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.901882 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.911063 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.918418 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.947746 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.947780 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.947790 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.947804 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[4797]: I0104 11:48:42.947816 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.049606 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.049657 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.049675 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.049696 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.049712 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.152969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.153029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.153044 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.153063 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.153093 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.255741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.256026 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.256037 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.256049 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.256057 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.312518 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.312620 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.312645 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.312667 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.312694 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312716 4797 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:47.312693097 +0000 UTC m=+26.169879806 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312762 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312799 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:47.312790729 +0000 UTC m=+26.169977438 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312813 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312826 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312839 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312867 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:47.312858171 +0000 UTC m=+26.170044890 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312872 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312885 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312894 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312920 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312920 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:47.312912532 +0000 UTC m=+26.170099251 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.312954 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:47.312946193 +0000 UTC m=+26.170132902 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.363888 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.363981 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.364015 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.364033 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.364045 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.465782 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.465844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.465861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.465884 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.465903 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.473100 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:43 crc kubenswrapper[4797]: E0104 11:48:43.473270 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.568618 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.568662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.568673 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.568689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.568699 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.621286 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044" exitCode=0 Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.621344 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.621387 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"98e2447b3d2f2f21a94a5a6b86c8fc59340ca70a687849b9352b4719f19e193b"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.622674 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerStarted","Data":"f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.624677 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a" exitCode=0 Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.625101 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.638173 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.650260 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.660478 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.672515 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.672541 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.672549 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.672560 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.672569 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.678498 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8c
cf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.693908 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.705817 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.724725 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.736646 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.748657 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.763973 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.774641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.774672 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.774681 4797 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.774694 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.774703 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.779068 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.791417 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.804743 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.816103 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.827358 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.840155 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.859482 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.865561 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-m5fj2"] Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.866089 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.868721 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.868800 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.868955 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.869601 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.876690 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.876719 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.876728 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.876741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.876750 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.879583 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.894313 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.916153 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.937822 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.960221 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.979245 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.979283 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.979293 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.979313 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.979324 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[4797]: I0104 11:48:43.990478 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:43Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.004256 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.018467 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.019634 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2gwl\" (UniqueName: \"kubernetes.io/projected/a83ea4c4-a12c-4ad5-868e-cc0f09576858-kube-api-access-b2gwl\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.019699 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a83ea4c4-a12c-4ad5-868e-cc0f09576858-host\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " 
pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.019717 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a83ea4c4-a12c-4ad5-868e-cc0f09576858-serviceca\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.034780 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc 
kubenswrapper[4797]: I0104 11:48:44.047938 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.063161 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.074155 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.082260 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.082305 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.082318 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.082342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.082358 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.085488 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.102603 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.116812 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.120836 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a83ea4c4-a12c-4ad5-868e-cc0f09576858-host\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.120864 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a83ea4c4-a12c-4ad5-868e-cc0f09576858-serviceca\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.120889 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2gwl\" (UniqueName: \"kubernetes.io/projected/a83ea4c4-a12c-4ad5-868e-cc0f09576858-kube-api-access-b2gwl\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.120972 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a83ea4c4-a12c-4ad5-868e-cc0f09576858-host\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.121892 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a83ea4c4-a12c-4ad5-868e-cc0f09576858-serviceca\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.127304 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kub
e-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.138203 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2gwl\" (UniqueName: \"kubernetes.io/projected/a83ea4c4-a12c-4ad5-868e-cc0f09576858-kube-api-access-b2gwl\") pod \"node-ca-m5fj2\" (UID: \"a83ea4c4-a12c-4ad5-868e-cc0f09576858\") " pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.140011 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.151258 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184035 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184313 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184344 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184352 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184365 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.184374 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.193822 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-m5fj2" Jan 04 11:48:44 crc kubenswrapper[4797]: W0104 11:48:44.209151 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda83ea4c4_a12c_4ad5_868e_cc0f09576858.slice/crio-ec35aa038b2b2c18d6e3676149969e1bd3c9f5d1630300ab30b89259ec73f0aa WatchSource:0}: Error finding container ec35aa038b2b2c18d6e3676149969e1bd3c9f5d1630300ab30b89259ec73f0aa: Status 404 returned error can't find the container with id ec35aa038b2b2c18d6e3676149969e1bd3c9f5d1630300ab30b89259ec73f0aa Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.223894 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.287100 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.287140 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.287151 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.287211 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.287224 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.390741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.390771 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.390779 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.390793 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.390803 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.473848 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.473887 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:44 crc kubenswrapper[4797]: E0104 11:48:44.474247 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:44 crc kubenswrapper[4797]: E0104 11:48:44.474374 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.493825 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.493886 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.493904 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.493927 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.493951 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.597650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.597695 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.597707 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.597724 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.597737 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.602444 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.608731 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.614909 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.620468 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.632578 4797 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-image-registry/node-ca-m5fj2" event={"ID":"a83ea4c4-a12c-4ad5-868e-cc0f09576858","Type":"ContainerStarted","Data":"1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.632654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-m5fj2" event={"ID":"a83ea4c4-a12c-4ad5-868e-cc0f09576858","Type":"ContainerStarted","Data":"ec35aa038b2b2c18d6e3676149969e1bd3c9f5d1630300ab30b89259ec73f0aa"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639287 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639333 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639347 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639359 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639370 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.639382 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.644040 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a" exitCode=0 Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.644139 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.644265 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.664711 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.686795 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.700428 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.700474 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.700486 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.700503 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.700515 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.707243 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.723259 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.742315 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.763262 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.777329 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.798247 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.804279 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.804342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.804362 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.804388 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.804409 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.817069 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.833418 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.862246 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.885649 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.903641 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.909042 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.909100 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.909122 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.909147 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.909164 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.929585 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.946237 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[4797]: I0104 11:48:44.965808 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.004073 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.012093 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.012144 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.012156 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.012174 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.012187 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.047542 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.084089 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.114962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.115036 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.115053 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.115078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.115094 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.124722 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.172567 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.204468 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.218155 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.218250 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.218276 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.218303 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.218324 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.243928 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.291856 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.321187 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.321265 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.321289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.321317 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.321339 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.322902 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.423939 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.424029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.424047 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.424071 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.424088 4797 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.473832 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:45 crc kubenswrapper[4797]: E0104 11:48:45.474343 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.527162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.527235 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.527259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.527298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.527322 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.629920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.629953 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.629963 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.629978 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.630005 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.650632 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7" exitCode=0 Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.650692 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.664494 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.685794 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.699379 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.709873 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.724058 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.732639 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.732772 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.732860 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.732944 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.733043 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.737269 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.749317 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.762960 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.776759 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.786857 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.803257 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.813150 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.834896 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.834926 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.834936 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.834949 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.834957 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.843057 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a
4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.883458 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.937521 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.937558 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.937570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc 
kubenswrapper[4797]: I0104 11:48:45.937587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[4797]: I0104 11:48:45.937598 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.039339 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.039389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.039401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.039417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.039432 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.142336 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.142392 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.142410 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.142433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.142451 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.245522 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.245583 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.245602 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.245626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.245644 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.349449 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.349503 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.349521 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.350243 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.350278 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.453264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.453318 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.453337 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.453361 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.453378 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.473657 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.473676 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:46 crc kubenswrapper[4797]: E0104 11:48:46.473832 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:46 crc kubenswrapper[4797]: E0104 11:48:46.474040 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.555043 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.555321 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.555402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.555478 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.555579 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.656496 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf" exitCode=0 Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.656574 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.657292 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.657334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.657346 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.657365 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.657376 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.661979 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.676409 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.689570 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name
\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.706589 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.723146 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.742527 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.754146 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.759632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.759678 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.759695 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.759718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.759737 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.772099 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/
run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.783222 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.801239 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.817290 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.829497 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.847163 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.863381 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.863414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.863422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.863435 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.863446 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.866683 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.879303 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:46Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.965994 4797 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.966020 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.966028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.966040 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:46 crc kubenswrapper[4797]: I0104 11:48:46.966048 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.068486 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.068531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.068546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.068568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.068583 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.172747 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.172799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.172816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.172841 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.172859 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.275078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.275140 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.275162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.275193 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.275217 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.351896 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.352093 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.352132 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.352166 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.352231 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.352369 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.352438 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.352416277 +0000 UTC m=+34.209603036 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.352915 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.352898979 +0000 UTC m=+34.210085728 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353055 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353079 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353096 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353144 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.353127325 +0000 UTC m=+34.210314064 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353217 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353234 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353248 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353284 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.353272169 +0000 UTC m=+34.210458918 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353356 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.353394 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.353382532 +0000 UTC m=+34.210569281 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.377051 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.377120 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.377138 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.377161 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.377178 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.474034 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:47 crc kubenswrapper[4797]: E0104 11:48:47.474443 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.484450 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.484497 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.484528 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.484551 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.484567 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.587301 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.587343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.587355 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.587372 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.587384 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.667420 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787" exitCode=0 Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.667466 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.681508 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.689484 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.689518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.689529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.689546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.689559 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.692340 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.703723 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.714572 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.725332 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.737133 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.747860 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.759639 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.775330 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.783500 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.791698 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.791735 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.791764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.791791 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.791801 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.794241 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.809676 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.826061 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.839756 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:47Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.894745 4797 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.894799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.894816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.894840 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.894857 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.997158 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.997206 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.997222 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.997245 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:47 crc kubenswrapper[4797]: I0104 11:48:47.997264 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.099704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.099761 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.099787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.099835 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.099859 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.202898 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.202957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.202974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.203036 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.203061 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.306409 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.306478 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.306499 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.306525 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.306546 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.409085 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.409146 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.409163 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.409188 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.409206 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.473109 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.473145 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:48 crc kubenswrapper[4797]: E0104 11:48:48.473276 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:48 crc kubenswrapper[4797]: E0104 11:48:48.473425 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.544366 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.544404 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.544417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.544433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.544446 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.647287 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.647624 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.647640 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.647661 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.647675 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.677983 4797 generic.go:334] "Generic (PLEG): container finished" podID="3287e642-dc09-4bbe-91c4-02904aa821de" containerID="8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382" exitCode=0 Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.678029 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerDied","Data":"8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.693420 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.705585 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.718200 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.730497 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.741923 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.751195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.751230 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.751241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.751257 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.751267 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.763415 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa0
0d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.774085 4797 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.789900 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.801939 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.810316 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.825455 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.839784 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.851931 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.853881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.853913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.853924 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.853938 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.853951 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.864917 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.956818 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.956888 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.956911 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.956943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[4797]: I0104 11:48:48.956969 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.059967 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.060071 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.060089 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.060115 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.060134 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.163334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.163395 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.163412 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.163442 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.163462 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.266572 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.266637 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.266655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.266679 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.266699 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.369927 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.369984 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.370039 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.370068 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.370089 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473206 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473252 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: E0104 11:48:49.473402 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473426 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473449 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.473470 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.577638 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.577697 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.577716 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.577747 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.577766 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.680979 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.681063 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.681080 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.681104 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.681121 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.784751 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.784816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.784881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.784913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.784935 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.887385 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.887419 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.887432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.887451 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.887466 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.989844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.989878 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.989886 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.989899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[4797]: I0104 11:48:49.989907 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.092523 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.092594 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.092612 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.092634 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.092647 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.194693 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.194748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.194765 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.194788 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.194816 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.297755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.297799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.297810 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.297831 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.297844 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.400579 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.400651 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.400671 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.401181 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.401232 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.474117 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.474185 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:50 crc kubenswrapper[4797]: E0104 11:48:50.474297 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:50 crc kubenswrapper[4797]: E0104 11:48:50.474443 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.504334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.504415 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.504442 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.504473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.504496 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.607517 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.607593 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.607622 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.607650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.607703 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.696933 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.700896 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.701104 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.710331 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.710379 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.710397 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.710421 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.710437 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.711439 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" event={"ID":"3287e642-dc09-4bbe-91c4-02904aa821de","Type":"ContainerStarted","Data":"6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.721215 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"
name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\"
:{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.735037 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.735968 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.737127 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.750266 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.762761 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.779171 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.796772 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.811063 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.813134 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.813181 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.813194 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.813214 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.813225 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.826053 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.842020 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.857625 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.871827 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.887311 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.902842 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.914117 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.917021 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.917073 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.917090 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.917111 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.917126 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:50Z","lastTransitionTime":"2026-01-04T11:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.927117 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.938819 4797 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.939440 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.957457 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.972052 4797 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.984258 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:50 crc kubenswrapper[4797]: I0104 11:48:50.999457 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:50Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.013058 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.019858 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.019920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.019935 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.019955 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.019967 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.028805 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.042241 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.056463 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.068588 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.084096 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.096492 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.114624 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.122389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.122444 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.122463 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.122490 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 
11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.122512 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.225362 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.225442 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.225468 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.225499 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.225522 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.328626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.328688 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.328713 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.328744 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.328767 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.431018 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.431079 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.431089 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.431102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.431111 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.473947 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:51 crc kubenswrapper[4797]: E0104 11:48:51.474141 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.484193 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.501158 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.518391 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.531219 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.532974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.533046 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.533059 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.533076 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.533092 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.550300 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe41
62a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.570275 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.590730 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.618182 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.634846 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 
04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.634881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.634892 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.634910 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.634922 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.637632 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.648673 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.666440 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.683591 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.700699 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.714880 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.718421 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.737238 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.737280 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.737292 4797 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.737311 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.737323 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.840443 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.840772 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.840783 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.840799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.840816 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.943509 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.943553 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.943615 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.943638 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:51 crc kubenswrapper[4797]: I0104 11:48:51.943655 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:51Z","lastTransitionTime":"2026-01-04T11:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.046197 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.046330 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.046351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.046377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.046395 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.155359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.155398 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.155417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.155435 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.155448 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.258262 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.258333 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.258391 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.258420 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.258445 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.361234 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.361294 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.361314 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.361342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.361362 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.464056 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.464125 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.464143 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.464170 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.464188 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.473379 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.473384 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:52 crc kubenswrapper[4797]: E0104 11:48:52.473587 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:52 crc kubenswrapper[4797]: E0104 11:48:52.473754 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.567483 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.567564 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.567588 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.567619 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.567646 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.670277 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.670320 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.670334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.670353 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.670366 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.723062 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/0.log" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.726866 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2" exitCode=1 Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.726911 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.727753 4797 scope.go:117] "RemoveContainer" containerID="910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.745948 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.763104 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.772967 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.773173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.773208 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.773230 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: 
I0104 11:48:52.773245 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.777451 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.799553 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.814271 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.828271 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.844514 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.874674 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:52Z\\\",\\\"message\\\":\\\"2 6063 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090113 6063 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:52.090254 6063 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090310 6063 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090450 6063 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:52.090871 6063 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:52.090895 6063 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:52.090923 6063 factory.go:656] Stopping watch factory\\\\nI0104 11:48:52.090930 6063 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:52.090964 6063 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.875617 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.875676 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.875687 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.875700 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.875709 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.889473 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.908230 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.921298 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.935542 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.953226 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.965559 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:52Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.978289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.978350 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.978367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.978393 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:52 crc kubenswrapper[4797]: I0104 11:48:52.978410 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:52Z","lastTransitionTime":"2026-01-04T11:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.080262 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.080287 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.080298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.080314 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.080325 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.086440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.086459 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.086469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.086483 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.086493 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.097161 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.100461 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.100485 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.100497 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.100513 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.100525 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.116210 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.120686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.120725 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.120737 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.120753 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.120770 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.134390 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.138174 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.138218 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.138235 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.138259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.138277 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.154229 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.158517 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.158555 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.158567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.158586 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.158598 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.178363 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.178538 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.183295 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.183347 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.183359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.183377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.183389 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.285390 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.285422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.285433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.285450 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.285461 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.386902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.386960 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.386981 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.387035 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.387054 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.473786 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:53 crc kubenswrapper[4797]: E0104 11:48:53.473943 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.489911 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.489962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.489978 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.490030 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.490048 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.592478 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.592519 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.592530 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.592546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.592558 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.695748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.695804 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.695822 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.695850 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.695867 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.734093 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/0.log" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.738056 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.738220 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.759511 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.784378 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.798509 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.798557 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.798568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.798586 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.798599 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.814157 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.831889 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.849507 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.864494 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.875639 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.894778 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.901601 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.901637 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.901649 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.901674 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.901686 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.908673 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.927321 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.943965 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.964863 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.986113 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:52Z\\\",\\\"message\\\":\\\"2 6063 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090113 6063 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:52.090254 6063 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090310 6063 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090450 6063 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:52.090871 6063 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:52.090895 6063 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:52.090923 6063 factory.go:656] Stopping watch factory\\\\nI0104 11:48:52.090930 6063 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:52.090964 6063 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:53 crc kubenswrapper[4797]: I0104 11:48:53.998532 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:53Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.004511 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.004605 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.004663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.004722 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.004805 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.107205 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.107414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.107471 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.107546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.107601 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.210274 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.210573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.210738 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.210868 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.211056 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.314189 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.314226 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.314235 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.314249 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.314258 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.417542 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.417611 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.417631 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.417656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.417676 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.473617 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:54 crc kubenswrapper[4797]: E0104 11:48:54.473790 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.474359 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:54 crc kubenswrapper[4797]: E0104 11:48:54.474516 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.520274 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.520354 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.520378 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.520411 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.520436 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.623188 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.623528 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.623663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.623806 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.623921 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.728067 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.728329 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.728449 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.728570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.728777 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.744549 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/1.log" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.745651 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/0.log" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.749964 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" exitCode=1 Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.750174 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.750374 4797 scope.go:117] "RemoveContainer" containerID="910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.752698 4797 scope.go:117] "RemoveContainer" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" Jan 04 11:48:54 crc kubenswrapper[4797]: E0104 11:48:54.753419 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.780199 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.794494 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.810295 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.825309 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.831620 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.831835 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.831971 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.832138 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.832234 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.841813 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.872818 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:52Z\\\",\\\"message\\\":\\\"2 6063 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090113 6063 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:52.090254 6063 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090310 6063 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090450 6063 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:52.090871 6063 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:52.090895 6063 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:52.090923 6063 factory.go:656] Stopping watch factory\\\\nI0104 11:48:52.090930 6063 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:52.090964 6063 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] 
Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.893271 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.911514 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.927686 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.935242 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.935439 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.935456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.935473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.935486 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.945918 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.965117 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.982579 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[4797]: I0104 11:48:54.997905 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.010608 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.037976 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.038155 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.038212 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.038647 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.038748 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.140982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.141254 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.141334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.141394 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.141499 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.244531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.244579 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.244598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.244621 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.244639 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.253062 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv"] Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.253652 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: W0104 11:48:55.256056 4797 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd": failed to list *v1.Secret: secrets "ovn-kubernetes-control-plane-dockercfg-gs7dd" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.256260 4797 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-control-plane-dockercfg-gs7dd\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-control-plane-dockercfg-gs7dd\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 04 11:48:55 crc kubenswrapper[4797]: W0104 11:48:55.256963 4797 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert": failed to list *v1.Secret: secrets "ovn-control-plane-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.257064 4797 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-control-plane-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-control-plane-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.279411 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.279476 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.279578 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dwrk\" (UniqueName: \"kubernetes.io/projected/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-kube-api-access-9dwrk\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.279616 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.280749 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.300166 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.315938 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.338448 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.347600 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.347675 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc 
kubenswrapper[4797]: I0104 11:48:55.347697 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.347721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.347739 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.355323 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 
04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.375909 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380036 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.380197 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.380163147 +0000 UTC m=+50.237349886 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380263 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380321 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380360 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380406 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380438 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.380470 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.381163 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dwrk\" (UniqueName: \"kubernetes.io/projected/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-kube-api-access-9dwrk\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.380624 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.381260 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381351 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.381321467 +0000 UTC m=+50.238508236 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.380712 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.381420 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.380792 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381545 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381039 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381574 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381611 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381634 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381444 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.381423179 +0000 UTC m=+50.238609968 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381687 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.381658826 +0000 UTC m=+50.238845645 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.381722 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.381703137 +0000 UTC m=+50.238889986 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.381835 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.396178 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.420562 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.422860 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dwrk\" (UniqueName: \"kubernetes.io/projected/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-kube-api-access-9dwrk\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.451565 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.451945 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.452192 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.452396 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.452592 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.454397 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://910060ee16432098407908655c8cd0ef3f163b5c97b6e2943ecd33fe4a6ac5e2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:52Z\\\",\\\"message\\\":\\\"2 6063 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090113 6063 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:52.090254 6063 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090310 6063 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:52.090450 6063 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:52.090871 6063 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:52.090895 6063 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:52.090923 6063 factory.go:656] Stopping watch factory\\\\nI0104 11:48:52.090930 6063 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:52.090964 6063 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.472281 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.474330 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.474509 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.492196 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.517556 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.538259 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.558647 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.558959 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.559211 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.559368 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.558981 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.559506 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.581840 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.663598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.663675 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.663702 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.663732 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.663754 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.755298 4797 scope.go:117] "RemoveContainer" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" Jan 04 11:48:55 crc kubenswrapper[4797]: E0104 11:48:55.755558 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.769567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.769619 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.769636 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.769660 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.769677 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.776623 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.799780 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.818260 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.834126 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.857664 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.872573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.872656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc 
kubenswrapper[4797]: I0104 11:48:55.872674 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.872699 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.872718 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.872916 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 
04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.890022 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.906296 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.941709 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.964173 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.975398 4797 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.975459 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.975477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.975507 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.975529 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[4797]: I0104 11:48:55.984125 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.001179 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.010404 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-v8lzg"] Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.010887 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.010954 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.018676 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.035874 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.053429 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.065100 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.077731 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.077960 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.078045 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.078060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.078078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.078092 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.088476 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.088572 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvn4p\" (UniqueName: \"kubernetes.io/projected/4c264f05-2fcc-422e-a717-d766b27bfd5b-kube-api-access-mvn4p\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.095113 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"sys
tem-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.105800 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.117294 
4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: 
I0104 11:48:56.131779 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.144523 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.158962 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.181421 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.181477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.181495 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.181518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.181535 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.185214 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.189166 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.189231 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvn4p\" (UniqueName: \"kubernetes.io/projected/4c264f05-2fcc-422e-a717-d766b27bfd5b-kube-api-access-mvn4p\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.189702 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.189823 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:56.689807091 +0000 UTC m=+35.546993800 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.207959 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvn4p\" (UniqueName: \"kubernetes.io/projected/4c264f05-2fcc-422e-a717-d766b27bfd5b-kube-api-access-mvn4p\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.216237 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414
f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.229690 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.243472 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.258645 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.277446 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.284344 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.284610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.284706 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.284794 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.284886 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.292104 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.311033 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.382310 4797 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-control-plane-metrics-cert: failed to sync secret cache: timed out waiting for the condition Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.382507 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert podName:f8f4bf89-d69a-4f1e-b0e0-464a906d8c02 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:56.882464575 +0000 UTC m=+35.739651324 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-control-plane-metrics-cert" (UniqueName: "kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert") pod "ovnkube-control-plane-749d76644c-fptjv" (UID: "f8f4bf89-d69a-4f1e-b0e0-464a906d8c02") : failed to sync secret cache: timed out waiting for the condition Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.387875 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.387928 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.387946 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.387970 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.388014 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.473747 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.473772 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.473935 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.474060 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.490887 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.490957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.490975 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.491022 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.491041 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.566943 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.594210 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.594285 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.594309 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.594341 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.594365 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.694049 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.694229 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.694315 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:57.694292907 +0000 UTC m=+36.551479656 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.696898 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.696962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.696981 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.697033 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.697051 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.750729 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.758565 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/1.log" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.798971 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.799065 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.799088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.799117 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.799138 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.861934 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.863526 4797 scope.go:117] "RemoveContainer" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" Jan 04 11:48:56 crc kubenswrapper[4797]: E0104 11:48:56.863869 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.896885 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.901796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.901844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.901862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.901886 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.901903 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[4797]: I0104 11:48:56.902614 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f8f4bf89-d69a-4f1e-b0e0-464a906d8c02-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fptjv\" (UID: \"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.005445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.005501 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.005520 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.005544 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.005562 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.077070 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" Jan 04 11:48:57 crc kubenswrapper[4797]: W0104 11:48:57.098310 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8f4bf89_d69a_4f1e_b0e0_464a906d8c02.slice/crio-d82e8b3b030df22bbc29375c17b26982d3aba303cf3f65e13a968339ae23a723 WatchSource:0}: Error finding container d82e8b3b030df22bbc29375c17b26982d3aba303cf3f65e13a968339ae23a723: Status 404 returned error can't find the container with id d82e8b3b030df22bbc29375c17b26982d3aba303cf3f65e13a968339ae23a723 Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.107218 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.107408 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.107507 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.107588 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.107671 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.211435 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.211489 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.211508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.211533 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.211551 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.314432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.314494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.314514 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.314540 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.314558 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.419090 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.419446 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.419465 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.419489 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.419508 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.474223 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.474231 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:57 crc kubenswrapper[4797]: E0104 11:48:57.474468 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:57 crc kubenswrapper[4797]: E0104 11:48:57.474398 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.522153 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.522202 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.522219 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.522242 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.522258 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.626085 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.626204 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.626280 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.626304 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.626321 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.706636 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:57 crc kubenswrapper[4797]: E0104 11:48:57.706885 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:57 crc kubenswrapper[4797]: E0104 11:48:57.707029 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:59.706950046 +0000 UTC m=+38.564136795 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.729460 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.729508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.729523 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.729542 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.729556 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.767902 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" event={"ID":"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02","Type":"ContainerStarted","Data":"67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.768204 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" event={"ID":"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02","Type":"ContainerStarted","Data":"6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.768307 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" event={"ID":"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02","Type":"ContainerStarted","Data":"d82e8b3b030df22bbc29375c17b26982d3aba303cf3f65e13a968339ae23a723"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.782774 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a
9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.799040 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.817108 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.832742 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.832977 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.833099 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.833203 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.833285 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.834184 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.848077 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.866656 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.882726 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.899907 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.913449 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.934909 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.936055 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.936104 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc
kubenswrapper[4797]: I0104 11:48:57.936117 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.936135 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.936148 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.949945 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan
04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.962919 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.977359 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:57 crc kubenswrapper[4797]: I0104 11:48:57.996665 4797
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:57Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.014715 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.038936 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.039156 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.039246 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.039333 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.039434 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.043362 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414
f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.142419 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.142485 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.142522 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.142554 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.142575 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.245649 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.245710 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.245729 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.245754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.245786 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.349058 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.349118 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.349140 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.349167 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.349185 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.452654 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.452714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.452731 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.452754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.452771 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.473462 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.473489 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:58 crc kubenswrapper[4797]: E0104 11:48:58.473642 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:58 crc kubenswrapper[4797]: E0104 11:48:58.473796 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.556083 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.556429 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.556650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.556810 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.556952 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.660594 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.660650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.660670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.660693 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.660711 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.763879 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.763938 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.763957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.763981 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.764021 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.867403 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.867798 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.868130 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.868656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.868947 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.972888 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.972960 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.972979 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.973030 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[4797]: I0104 11:48:58.973052 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.076072 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.076135 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.076153 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.076177 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.076196 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.180150 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.180220 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.180241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.180269 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.180289 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.283620 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.283677 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.283697 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.283728 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.283750 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.397501 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.397567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.397583 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.397603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.397617 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.473946 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.474065 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:59 crc kubenswrapper[4797]: E0104 11:48:59.474120 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:48:59 crc kubenswrapper[4797]: E0104 11:48:59.474255 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.499571 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.499626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.499643 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.499663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.499682 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.602891 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.602957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.602974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.603028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.603046 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.706953 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.707040 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.707059 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.707081 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.707098 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.728876 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:48:59 crc kubenswrapper[4797]: E0104 11:48:59.729176 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:59 crc kubenswrapper[4797]: E0104 11:48:59.729273 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:03.729249554 +0000 UTC m=+42.586436303 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.810495 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.810571 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.810593 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.810620 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.810642 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.914238 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.914301 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.914321 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.914351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[4797]: I0104 11:48:59.914373 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.017218 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.017289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.017312 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.017343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.017366 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.120929 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.120975 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.121021 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.121045 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.121063 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.223816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.223879 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.223902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.223951 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.223972 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.326027 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.326066 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.326076 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.326094 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.326106 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.429297 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.429646 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.429824 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.429969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.430146 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.473602 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.473662 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:00 crc kubenswrapper[4797]: E0104 11:49:00.474235 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:00 crc kubenswrapper[4797]: E0104 11:49:00.474304 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.533155 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.533210 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.533227 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.533252 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.533270 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.636178 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.636235 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.636252 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.636276 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.636292 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.739773 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.739839 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.739865 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.739897 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.739927 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.842281 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.842326 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.842341 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.842361 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.842375 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.944296 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.944342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.944354 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.944371 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[4797]: I0104 11:49:00.944383 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.046501 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.046554 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.046565 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.046581 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.046593 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.149613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.149699 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.149718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.149745 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.149764 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.253661 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.253735 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.253755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.253790 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.253813 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.357598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.357654 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.357666 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.357685 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.357700 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.462102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.462174 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.462195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.462225 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.462249 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.473733 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.473976 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:01 crc kubenswrapper[4797]: E0104 11:49:01.474247 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:01 crc kubenswrapper[4797]: E0104 11:49:01.474462 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.496278 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.523560 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.547758 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.564453 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.564518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.564541 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.564570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.564594 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.575206 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.598371 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.620163 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.645805 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.661140 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 
11:49:01.667549 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.667682 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.667752 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.667872 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.667972 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.679524 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.697144 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.716886 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.735211 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.754036 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.771972 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.772071 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.772088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.772112 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.772129 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.788872 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.814878 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.832468 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:01Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.875776 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.875839 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.875856 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.875881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.875900 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.979580 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.979681 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.979701 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.979723 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[4797]: I0104 11:49:01.979740 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.082955 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.083071 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.083098 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.083133 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.083157 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.185840 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.185890 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.185902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.185921 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.185935 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.289057 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.289102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.289110 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.289124 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.289134 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.392323 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.392689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.393078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.393398 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.393717 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.473928 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:02 crc kubenswrapper[4797]: E0104 11:49:02.474355 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.475057 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:02 crc kubenswrapper[4797]: E0104 11:49:02.475310 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
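[Editor's note, continuing the Go sketches above: every "Error syncing pod, skipping" entry here reduces to the kubelet's own check that the CNI conf directory is empty. The snippet below stands in for that check against the exact path named in the log message; it is a diagnostic aid, not tooling that appears in this capture.]

package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	const cniDir = "/etc/kubernetes/cni/net.d/" // path taken from the kubelet message
	entries, err := os.ReadDir(cniDir)
	if err != nil {
		log.Fatalf("read %s: %v", cniDir, err)
	}
	if len(entries) == 0 {
		// Exactly the state kubelet keeps reporting:
		// "no CNI configuration file in /etc/kubernetes/cni/net.d/."
		fmt.Println("no CNI configuration files found; network plugin not ready")
		return
	}
	for _, e := range entries {
		fmt.Println("found:", e.Name())
	}
}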
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.498088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.498170 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.498187 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.498213 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.498233 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.600799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.601169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.601327 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.601560 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.601702 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.705028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.705086 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.705102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.705124 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.705142 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.807966 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.808081 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.808097 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.808117 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.808132 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.911807 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.912201 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.912348 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.912475 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:02 crc kubenswrapper[4797]: I0104 11:49:02.912588 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:02Z","lastTransitionTime":"2026-01-04T11:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.015765 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.015814 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.015833 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.015859 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.015881 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.118656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.118710 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.118726 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.118749 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.118768 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.222568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.222894 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.223126 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.223292 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.223431 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.326936 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.327036 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.327055 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.327081 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.327099 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.421651 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.421764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.421788 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.421823 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.421844 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.443830 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:03Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.450440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.450510 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.450531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.450559 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.450577 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.470882 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:03Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.474470 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.474729 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.475413 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.475698 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.478125 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.478215 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.478241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.478280 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.478303 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
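[Editor's aid for post-processing this capture: each journald line above wraps a klog-formatted body (severity letter, MMDD date, wall-clock time, PID, source file:line, then the structured message). The Go sketch below splits such a body into its fields; the regex is an assumption that fits the lines in this log, not an official klog grammar.]

package main

import (
	"fmt"
	"regexp"
)

// Matches klog-style bodies as they appear in this log, e.g.
// I0104 11:49:03.478303 4797 setters.go:603] "Node became not ready" ...
var klogRe = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `I0104 11:49:03.478303 4797 setters.go:603] "Node became not ready" node="crc"`
	m := klogRe.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s\nmessage=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}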
Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.499441 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:03Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.504419 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.504502 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.504526 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.504611 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.504635 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.525248 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:03Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.531602 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.531667 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.531686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.531720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.531738 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.558567 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:03Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.558813 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.563296 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
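Every status-patch attempt above fails for the same reason: the node clock reads 2026-01-04, but the webhook's serving certificate expired on 2025-08-24, so TLS verification rejects the connection before the POST is ever delivered. The Go sketch below reproduces the validity-window check that crypto/x509 enforces during verification and that produces the "certificate has expired or is not yet valid" message; the certificate path is a hypothetical stand-in, not a path taken from this log.

    // certcheck.go - minimal sketch of the validity-window check behind
    // "x509: certificate has expired or is not yet valid".
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path; point this at the webhook's serving cert.
        data, err := os.ReadFile("/tmp/webhook-serving-cert.pem")
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("certificate not yet valid: current time %s is before %s\n",
                now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            fmt.Printf("certificate has expired: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }

Run against the webhook's expired serving certificate, this prints the same "current time ... is after ..." diagnostic the kubelet logs above.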
event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.563352 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.563370 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.563396 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.563415 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.666432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.666467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.666478 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.666493 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.666504 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.769575 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.769642 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.769659 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.769684 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.769701 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.775979 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.776143 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:03 crc kubenswrapper[4797]: E0104 11:49:03.776204 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:11.776187072 +0000 UTC m=+50.633373791 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.872929 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.873049 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.873068 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.873094 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:03 crc kubenswrapper[4797]: I0104 11:49:03.873113 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:03Z","lastTransitionTime":"2026-01-04T11:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
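The nestedpendingoperations record above shows the kubelet refusing to retry the failed metrics-certs mount for another 8 s. That durationBeforeRetry is consistent with a doubling backoff (0.5 s, 1 s, 2 s, 4 s, 8 s after five consecutive failures). A minimal sketch of such a policy, with the initial delay and cap as assumed illustrative values rather than constants read from the kubelet:

    // backoff.go - sketch of a doubling retry delay of the kind behind
    // "No retries permitted until ... (durationBeforeRetry 8s)".
    package main

    import (
        "fmt"
        "time"
    )

    type backoff struct {
        delay time.Duration // wait before the next retry
        max   time.Duration // upper bound on the wait
    }

    // next returns the current wait and doubles it for the following
    // failure, capping at max.
    func (b *backoff) next() time.Duration {
        d := b.delay
        b.delay *= 2
        if b.delay > b.max {
            b.delay = b.max
        }
        return d
    }

    func main() {
        b := &backoff{delay: 500 * time.Millisecond, max: 2 * time.Minute} // assumed values
        lastFailure := time.Now()
        for i := 1; i <= 5; i++ {
            d := b.next()
            fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
                i, lastFailure.Add(d).Format(time.RFC3339), d)
        }
    }

With these assumed constants, the fifth consecutive failure yields the 8 s delay seen in the log line above.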
[... identical node-status event blocks at 11:49:03.975, 11:49:04.078, 11:49:04.182, 11:49:04.285, and 11:49:04.389 elided ...]
Jan 04 11:49:04 crc kubenswrapper[4797]: I0104 11:49:04.473699 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:04 crc kubenswrapper[4797]: I0104 11:49:04.473731 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:04 crc kubenswrapper[4797]: E0104 11:49:04.473912 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:04 crc kubenswrapper[4797]: E0104 11:49:04.474082 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... identical node-status event blocks at 11:49:04.491 and 11:49:04.594 elided ...]
[... identical node-status event blocks at 11:49:04.697, 11:49:04.799, 11:49:04.903, 11:49:05.006, 11:49:05.109, and 11:49:05.212 elided ...]
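Each of the elided blocks ends with the same setters.go record, which serializes a single Ready condition object. The sketch below rebuilds that condition with a hand-rolled struct mirroring exactly the fields visible in the log; it deliberately avoids importing the Kubernetes API types, so the struct itself is illustrative.

    // condition.go - sketch reconstructing the Ready condition that
    // setters.go logs as "Node became not ready".
    package main

    import (
        "encoding/json"
        "fmt"
        "time"
    )

    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        now := time.Now().UTC().Format(time.RFC3339)
        c := nodeCondition{
            Type:               "Ready",
            Status:             "False",
            LastHeartbeatTime:  now,
            LastTransitionTime: now,
            Reason:             "KubeletNotReady",
            Message: "container runtime network not ready: NetworkReady=false " +
                "reason:NetworkPluginNotReady message:Network plugin returns error: " +
                "no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
                "Has your network provider started?",
        }
        out, _ := json.Marshal(c)
        fmt.Println(string(out)) // matches the condition={...} payload above
    }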
[... identical node-status event blocks at 11:49:05.320 and 11:49:05.424 elided ...]
Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.473795 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.473837 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:05 crc kubenswrapper[4797]: E0104 11:49:05.473982 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:05 crc kubenswrapper[4797]: E0104 11:49:05.474127 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... identical node-status event blocks at 11:49:05.528 and 11:49:05.631 elided ...]
Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.735292 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.735365 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.735389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.735421 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.735444 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.838315 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.838406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.838435 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.838469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.838492 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.942641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.942706 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.942723 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.942749 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[4797]: I0104 11:49:05.942766 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.046360 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.046425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.046443 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.046469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.046487 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.149652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.149714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.149732 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.149754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.149773 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.252690 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.252745 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.252763 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.252787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.252806 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.356365 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.356433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.356458 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.356487 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.356511 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.460129 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.460195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.460216 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.460243 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.460261 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.473071 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:06 crc kubenswrapper[4797]: E0104 11:49:06.473428 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.473095 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:06 crc kubenswrapper[4797]: E0104 11:49:06.473819 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.563368 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.563753 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.563943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.564156 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.564292 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.667565 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.667615 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.667629 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.667651 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.667665 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.770740 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.770956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.771015 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.771051 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.771078 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.873892 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.873954 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.873972 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.874020 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.874039 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.976659 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.977099 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.977265 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.977481 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[4797]: I0104 11:49:06.977656 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.080577 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.080639 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.080655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.080680 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.080697 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.183453 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.183513 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.183531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.183557 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.183579 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.287509 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.288162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.288201 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.288232 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.288251 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.390919 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.391029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.391056 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.391081 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.391099 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.473863 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:07 crc kubenswrapper[4797]: E0104 11:49:07.474109 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.474535 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:07 crc kubenswrapper[4797]: E0104 11:49:07.474660 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.494119 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.494175 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.494192 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.494214 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.494230 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.597132 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.597190 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.597207 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.597229 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.597250 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.700632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.700687 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.700700 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.700716 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.700730 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.803349 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.803397 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.803410 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.803429 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.803441 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.906502 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.906573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.906596 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.906625 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[4797]: I0104 11:49:07.906649 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.010017 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.010079 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.010097 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.010120 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.010139 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.113508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.113590 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.113615 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.113644 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.113668 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.216421 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.216973 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.217042 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.217076 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.217107 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.320730 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.320815 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.320837 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.320868 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.320890 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.423747 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.424090 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.424264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.424440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.424590 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.473422 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:08 crc kubenswrapper[4797]: E0104 11:49:08.473598 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.473434 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:08 crc kubenswrapper[4797]: E0104 11:49:08.473719 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.527784 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.527849 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.527868 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.527893 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.527916 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.630477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.630527 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.630560 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.630578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.630590 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.734098 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.734152 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.734169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.734192 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.734209 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.836824 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.836865 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.836873 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.836887 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.836896 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.939858 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.939894 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.939906 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.939924 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[4797]: I0104 11:49:08.939935 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.042755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.042799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.042809 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.042825 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.042836 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.145662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.145728 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.145748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.145773 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.145790 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.248740 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.248800 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.248817 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.248840 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.248857 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.352114 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.352177 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.352198 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.352226 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.352248 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.454780 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.455192 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.455238 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.455265 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.455282 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.473321 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.473382 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:09 crc kubenswrapper[4797]: E0104 11:49:09.473501 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:09 crc kubenswrapper[4797]: E0104 11:49:09.473636 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.558597 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.558707 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.559219 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.560087 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.560250 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.663310 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.663367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.663384 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.663407 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.663424 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.766881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.766940 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.766958 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.766984 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.767038 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.870343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.870705 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.870945 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.871222 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.871619 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.975116 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.975466 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.975592 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.975714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:09 crc kubenswrapper[4797]: I0104 11:49:09.975843 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.078612 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.078949 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.079252 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.079438 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.079601 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.182513 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.182561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.182578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.182604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.182622 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.285237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.285289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.285306 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.285329 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.285352 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.388631 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.389498 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.389689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.389963 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.390411 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.474037 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:10 crc kubenswrapper[4797]: E0104 11:49:10.474240 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.474067 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:10 crc kubenswrapper[4797]: E0104 11:49:10.474723 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.499381 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.499760 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.499962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.500237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.500433 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.580821 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.595830 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.603747 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.603967 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.604080 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.604162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.604273 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.605413 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.630536 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.678313 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.692551 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.702537 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb046
4938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.706478 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.706506 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.706518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.706533 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.706544 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.722305 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.737945 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.760029 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.777463 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.796112 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.808955 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.809042 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.809065 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.809088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.809105 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.814791 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.829695 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.853685 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.871372 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.887370 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.901961 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:10Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.911367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.911477 4797 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.911566 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.911680 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[4797]: I0104 11:49:10.911777 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.014482 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.014528 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.014543 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.014561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.014573 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.117351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.117451 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.117471 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.117495 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.117511 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.219891 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.219929 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.219937 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.219952 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.219961 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.322632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.322696 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.322715 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.322738 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.322754 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.425480 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.425529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.425545 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.425596 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.425619 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.459522 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.459666 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:43.459635494 +0000 UTC m=+82.316822243 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.459715 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.459757 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.459793 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.459881 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460054 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460111 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-04 11:49:43.460095316 +0000 UTC m=+82.317282065 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460364 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460385 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460402 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460445 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:43.460431075 +0000 UTC m=+82.317617824 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460830 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460878 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:43.460863826 +0000 UTC m=+82.318050575 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460958 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.460978 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.461027 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.461067 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:43.461054521 +0000 UTC m=+82.318241260 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.473929 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.474145 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.474768 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.474946 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.475262 4797 scope.go:117] "RemoveContainer" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.494501 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\"
:0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.517812 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.529638 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.529892 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.530098 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.530351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.530538 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.541106 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.568700 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.592539 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.617619 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.634077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.634247 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.634277 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.634359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.634388 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.638611 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.655511 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.681209 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.700926 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.719900 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.736927 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.737267 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.737318 4797 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.737343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.737374 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.737401 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.752104 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.801672 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.817009 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.819627 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/1.log" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.822841 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.823441 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.840450 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.840496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.840508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.840525 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.840537 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.851429 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.864684 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.864859 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: E0104 11:49:11.864931 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.864915243 +0000 UTC m=+66.722101952 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.869617 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11
:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.881739 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6
d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.896671 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.911076 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.928387 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.942648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.942704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc 
kubenswrapper[4797]: I0104 11:49:11.942729 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.942764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.942786 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.950574 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 
04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.970632 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:11 crc kubenswrapper[4797]: I0104 11:49:11.997524 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:11Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.018306 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.037153 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.045735 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.045776 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.045787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.045809 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.045822 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.066914 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.086258 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7
c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.100367 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.114829 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb046
4938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.128427 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.139738 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.147716 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.147743 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.147754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.147770 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.147782 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.159745 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.178127 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.250535 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.250574 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.250584 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.250598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.250610 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.354361 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.354417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.354427 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.354447 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.354460 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.457599 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.457664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.457682 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.457709 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.457727 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.473494 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.473503 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:12 crc kubenswrapper[4797]: E0104 11:49:12.473619 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:12 crc kubenswrapper[4797]: E0104 11:49:12.473788 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.561314 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.561389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.561407 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.561437 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.561456 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.664440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.664506 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.664525 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.664553 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.664571 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.767424 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.767453 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.767461 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.767474 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.767484 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.829025 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/2.log" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.829538 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/1.log" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.832183 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35" exitCode=1 Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.832209 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.832239 4797 scope.go:117] "RemoveContainer" containerID="cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.833427 4797 scope.go:117] "RemoveContainer" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35" Jan 04 11:49:12 crc kubenswrapper[4797]: E0104 11:49:12.833717 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.854750 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.869619 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.869655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.869667 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.869683 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.869694 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.875953 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.896731 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.911770 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.926941 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.939825 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.957792 4797 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.972355 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.974846 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc 
kubenswrapper[4797]: I0104 11:49:12.974913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.974931 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.974956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.974974 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[4797]: I0104 11:49:12.996580 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:12Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.015852 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.033012 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.050954 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.070422 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.078757 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.078811 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.078834 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.078861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.078881 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.091612 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.108164 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.144388 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf8a91a61697636fae8c18641044d0673b27f414f31790add5555b6a45b5b8c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:54Z\\\",\\\"message\\\":\\\"l (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:53.782786 6205 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:53.792357 6205 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:48:53.792397 6205 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:53.792435 6205 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:53.792456 6205 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:53.792464 6205 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:53.792485 6205 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:53.792520 6205 factory.go:656] Stopping watch factory\\\\nI0104 11:48:53.792548 6205 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:53.792561 6205 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:53.792572 6205 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:53.792585 6205 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:53.792595 6205 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:53.792607 6205 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.168867 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.182673 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.182740 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.182762 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.182792 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.182814 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.285894 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.285971 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.286032 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.286065 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.286087 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.391122 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.391196 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.391214 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.391237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.391254 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.473269 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.473362 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.473515 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.473635 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.496271 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.496348 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.496373 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.496407 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.496431 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.599469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.599534 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.599553 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.599578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.599597 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.702862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.702922 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.702939 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.702961 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.702979 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.804629 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.804686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.804706 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.804732 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.804751 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.827205 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.832824 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.832951 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.833026 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.833052 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.833105 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.839732 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/2.log" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.845180 4797 scope.go:117] "RemoveContainer" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.845451 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.853893 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.863482 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.863540 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.863551 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.863572 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.863584 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.866131 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.880172 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b
11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.884585 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.884663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.884691 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.884726 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.884752 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.889373 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.903926 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.906037 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.911456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.911563 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.911589 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.911619 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.911643 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.918541 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: E0104 11:49:13.929601 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.931450 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.931483 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.931493 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.931511 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.931525 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.937811 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.952864 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.976530 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:13 crc kubenswrapper[4797]: I0104 11:49:13.997513 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:13Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.012602 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.031218 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.034563 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.034620 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc 
kubenswrapper[4797]: I0104 11:49:14.034638 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.034664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.034682 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.045197 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 
04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.066121 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.083792 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.100564 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.119846 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.138183 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.138278 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.138301 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.138334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.138356 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.150837 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7
c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.171492 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.241593 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.241662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.241682 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.241708 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.241726 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.344814 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.344887 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.344908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.344936 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.344957 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.447531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.447590 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.447609 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.447635 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.447653 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.473900 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.473933 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:14 crc kubenswrapper[4797]: E0104 11:49:14.474104 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:14 crc kubenswrapper[4797]: E0104 11:49:14.474236 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.550907 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.551017 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.551043 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.551082 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.551106 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.654191 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.654250 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.654268 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.654293 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[4797]: I0104 11:49:14.654315 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:15 crc kubenswrapper[4797]: I0104 11:49:15.473145 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:15 crc kubenswrapper[4797]: I0104 11:49:15.473273 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:15 crc kubenswrapper[4797]: E0104 11:49:15.473457 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:15 crc kubenswrapper[4797]: E0104 11:49:15.473620 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:16 crc kubenswrapper[4797]: I0104 11:49:16.473473 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:16 crc kubenswrapper[4797]: I0104 11:49:16.473604 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:16 crc kubenswrapper[4797]: E0104 11:49:16.473659 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:16 crc kubenswrapper[4797]: E0104 11:49:16.473758 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:17 crc kubenswrapper[4797]: I0104 11:49:17.474094 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:17 crc kubenswrapper[4797]: I0104 11:49:17.474257 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:17 crc kubenswrapper[4797]: E0104 11:49:17.474385 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:17 crc kubenswrapper[4797]: E0104 11:49:17.474528 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:18 crc kubenswrapper[4797]: I0104 11:49:18.473826 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:18 crc kubenswrapper[4797]: I0104 11:49:18.473826 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:18 crc kubenswrapper[4797]: E0104 11:49:18.474070 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:18 crc kubenswrapper[4797]: E0104 11:49:18.474166 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.473225 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:19 crc kubenswrapper[4797]: E0104 11:49:19.473597 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.473692 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:19 crc kubenswrapper[4797]: E0104 11:49:19.474133 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.721960 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.722063 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.722083 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.722112 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.722128 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.825067 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.825136 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.825154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.825181 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.825200 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.927929 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.928028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.928049 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.928075 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[4797]: I0104 11:49:19.928091 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.031374 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.031462 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.031486 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.031518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.031540 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.134434 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.134494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.134513 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.134536 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.134555 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.237609 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.237670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.237687 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.237722 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.237750 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.341121 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.341200 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.341220 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.341246 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.341264 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.444133 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.444185 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.444203 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.444229 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.444247 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.473798 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:20 crc kubenswrapper[4797]: E0104 11:49:20.474095 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.474233 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:20 crc kubenswrapper[4797]: E0104 11:49:20.474365 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.547825 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.547893 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.547912 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.547941 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.547960 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.650343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.650406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.650422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.650446 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.650462 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.753188 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.753263 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.753281 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.753308 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.753327 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.856883 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.856941 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.856957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.856980 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.857026 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.959445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.959492 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.959505 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.959522 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[4797]: I0104 11:49:20.959534 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.062893 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.062953 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.062975 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.063028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.063047 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.166438 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.166499 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.166517 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.166541 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.166561 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.269935 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.270028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.270048 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.270080 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.270099 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.373845 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.373878 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.373903 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.373946 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.373967 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.473714 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.473798 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:21 crc kubenswrapper[4797]: E0104 11:49:21.473865 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:21 crc kubenswrapper[4797]: E0104 11:49:21.474037 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.477411 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.477474 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.477491 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.477521 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.477539 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.492470 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c
5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.510982 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.531625 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.559720 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015b
ba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.579773 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.579874 4797 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.580147 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.580174 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.580200 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.580218 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.596685 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.612535 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.629470 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.643345 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.660111 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.680065 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.683372 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.683420 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.683439 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.683463 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.683481 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.698321 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.710864 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.728921 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.744655 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.760619 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.774287 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:21Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.786339 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.786412 4797 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.786436 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.786466 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.786487 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.919264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.919502 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.919577 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.919648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[4797]: I0104 11:49:21.919711 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.022293 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.022355 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.022376 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.022401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.022419 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.127781 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.127838 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.127863 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.127891 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.127914 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.231380 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.231451 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.231470 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.231500 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.231519 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.334241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.334589 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.334748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.334913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.335100 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.437962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.438312 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.438605 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.438803 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.439055 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.473515 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:22 crc kubenswrapper[4797]: E0104 11:49:22.473684 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.473761 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:22 crc kubenswrapper[4797]: E0104 11:49:22.473827 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.543262 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.543307 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.543320 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.543338 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.543349 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.647652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.647693 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.647705 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.647720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[4797]: I0104 11:49:22.647730 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.267861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.268186 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.268334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.268466 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.268580 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.371241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.371306 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.371325 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.371353 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.371372 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.473137 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.473217 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:23 crc kubenswrapper[4797]: E0104 11:49:23.473313 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:23 crc kubenswrapper[4797]: E0104 11:49:23.473471 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.474290 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.474336 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.474350 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.474374 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.474387 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.576833 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.576885 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.576902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.576926 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.576946 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.680358 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.680414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.680430 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.680455 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.680474 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.783665 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.783721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.783738 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.783762 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.783786 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.888567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.888616 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.888632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.888655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.888672 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.992154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.992542 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.992704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.992876 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[4797]: I0104 11:49:23.993079 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.019165 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.019568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.019788 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.020033 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.020247 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.041534 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.047703 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.048064 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.048236 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.048389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.048536 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.068831 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.073101 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.073167 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.073191 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.073219 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.073236 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.093489 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.098161 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.098240 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.098257 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.098310 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.098326 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.118667 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.122813 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.122841 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.122871 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.122886 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.122895 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.140791 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.141008 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.143340 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.143400 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.143415 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.143433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.143445 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.246106 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.246152 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.246166 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.246187 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.246202 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.347908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.347937 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.347945 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.347955 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.347964 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.453043 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.453077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.453088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.453102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.453114 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.473573 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.473678 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.474066 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:24 crc kubenswrapper[4797]: E0104 11:49:24.474421 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.555398 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.555830 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.555968 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.556156 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.556289 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.658827 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.659194 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.659343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.659486 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.659602 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
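Every status patch above is being rejected for the same reason: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-04. A minimal sketch of how one might confirm the certificate window from the node itself; the use of Python and the third-party cryptography package here is an assumption for illustration, not tooling the log implies:

```python
# Sketch: fetch the webhook's serving certificate (without verification,
# since verification is exactly what fails above) and compare its validity
# window against the current time. HOST/PORT come from the webhook URL in
# the kubelet error; the `cryptography` package is an assumed dependency.
import ssl
from datetime import datetime, timezone

from cryptography import x509

HOST, PORT = "127.0.0.1", 9743  # from Post "https://127.0.0.1:9743/node?timeout=10s"

pem = ssl.get_server_certificate((HOST, PORT))  # no chain/expiry checks here
cert = x509.load_pem_x509_certificate(pem.encode())

not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)
print("notAfter:", not_after.isoformat())
print("expired: ", datetime.now(timezone.utc) > not_after)  # True matches the x509 error
```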
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.762351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.762632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.762800 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.762957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.763127 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.866652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.866741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.866759 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.866781 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.866801 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.970537 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.970590 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.970607 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.970629 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:24 crc kubenswrapper[4797]: I0104 11:49:24.970646 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.073324 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.073413 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.073432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.073456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.073476 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.176590 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.176646 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.176659 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.176679 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.176690 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.279603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.279668 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.279686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.279711 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.279728 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.382666 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.382730 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.382748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.382772 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.382790 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.474045 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.474079 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:25 crc kubenswrapper[4797]: E0104 11:49:25.474227 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:25 crc kubenswrapper[4797]: E0104 11:49:25.474388 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.486433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.486473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.486482 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.486496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.486506 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.588315 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.588364 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.588373 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.588387 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.588396 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
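Each NotReady heartbeat in this run carries one underlying condition: no CNI configuration file in /etc/kubernetes/cni/net.d/, which is also why sandbox creation for the network pods above keeps being skipped. A small sketch of the equivalent check, assuming the conventional CNI config extensions (.conf, .conflist, .json) that container runtimes scan for; an empty result is consistent with the NetworkPluginNotReady condition repeated here:

```python
# Sketch: list CNI network configs in the directory named by the kubelet
# errors. The extension set is the conventional CNI one, assumed here
# rather than read from the runtime's own configuration.
from pathlib import Path

CNI_DIR = Path("/etc/kubernetes/cni/net.d")  # directory from the log message
CNI_EXTS = {".conf", ".conflist", ".json"}

configs = sorted(p for p in CNI_DIR.iterdir() if p.suffix in CNI_EXTS) if CNI_DIR.is_dir() else []
if configs:
    for p in configs:
        print("found:", p)
else:
    print("no CNI configuration file found; network plugin not ready")
```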
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.690518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.690558 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.690566 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.690581 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.690590 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.792974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.793041 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.793056 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.793076 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.793088 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.895380 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.895409 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.895417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.895430 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.895439 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.997204 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.997233 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.997241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.997254 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[4797]: I0104 11:49:25.997262 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.099179 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.099216 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.099225 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.099240 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.099249 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.202141 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.202175 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.202184 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.202199 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.202207 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
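The same five-entry heartbeat block (four "Recording event message" lines plus one "Node became not ready" condition) repeats several times per second, which makes the raw journal hard to skim. A short sketch for condensing it, assuming the journal has been saved to a plain text file in this same format (the file name is an assumption); it extracts each "Node became not ready" entry's kubelet timestamp and reason:

```python
# Sketch: condense repeated "Node became not ready" heartbeats from a saved
# kubelet journal in the format above. KUBELET_LOG is an assumed local path.
import json
import re

KUBELET_LOG = "kubelet.log"  # assumption: journalctl -u kubelet output saved to a file

# Matches: ... setters.go:603] "Node became not ready" node="crc" condition={...}
# Non-greedy {...} works because this condition object has no nested braces.
PATTERN = re.compile(
    r'(?P<ts>I\d{4} \d{2}:\d{2}:\d{2}\.\d+).*?'
    r'"Node became not ready" node="(?P<node>[^"]+)" condition=(?P<cond>\{.*?\})'
)

with open(KUBELET_LOG, encoding="utf-8") as fh:
    for line in fh:
        m = PATTERN.search(line)  # first heartbeat per line; enough for a summary
        if m:
            cond = json.loads(m.group("cond"))  # the condition is plain JSON
            print(m.group("ts"), m.group("node"), cond["reason"], cond["message"][:60])
```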
Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.304707 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.304769 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.304782 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.304798 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.304812 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.407854 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.407896 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.407904 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.407920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.407928 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.473462 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.473474 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:26 crc kubenswrapper[4797]: E0104 11:49:26.473581 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:26 crc kubenswrapper[4797]: E0104 11:49:26.473664 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.510353 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.510394 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.510406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.510422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.510432 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.613364 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.613401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.613411 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.613447 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.613459 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.715289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.715344 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.715356 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.715370 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.715382 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.817538 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.817577 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.817587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.817603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.817614 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.920425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.920470 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.920481 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.920497 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:26 crc kubenswrapper[4797]: I0104 11:49:26.920508 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.023031 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.023060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.023069 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.023082 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.023090 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.125028 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.125069 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.125078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.125091 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.125100 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.227533 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.227610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.227634 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.227665 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.227687 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.330191 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.330235 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.330253 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.330276 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.330293 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.433209 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.433291 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.433317 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.433347 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.433373 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.474095 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.474099 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:27 crc kubenswrapper[4797]: E0104 11:49:27.474268 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:27 crc kubenswrapper[4797]: E0104 11:49:27.474443 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.536174 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.536203 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.536212 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.536225 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.536234 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.639357 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.639401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.639412 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.639428 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.639439 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.741604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.741647 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.741657 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.741674 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.741687 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.844677 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.844754 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.844783 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.844814 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.844852 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.883026 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:27 crc kubenswrapper[4797]: E0104 11:49:27.883473 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:27 crc kubenswrapper[4797]: E0104 11:49:27.883567 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:59.883537073 +0000 UTC m=+98.740723822 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.947269 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.947314 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.947326 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.947343 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[4797]: I0104 11:49:27.947355 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.050221 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.050280 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.050297 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.050321 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.050339 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.153422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.153504 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.153516 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.153533 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.153545 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.256384 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.256428 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.256437 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.256452 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.256463 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.359672 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.359721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.359733 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.359750 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.359762 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.461748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.461801 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.461818 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.461839 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.461856 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.473262 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.473264 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:28 crc kubenswrapper[4797]: E0104 11:49:28.473800 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:28 crc kubenswrapper[4797]: E0104 11:49:28.473900 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.474439 4797 scope.go:117] "RemoveContainer" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35" Jan 04 11:49:28 crc kubenswrapper[4797]: E0104 11:49:28.474715 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.565555 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.565612 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.565624 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.565644 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.565656 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.667733 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.667802 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.667820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.667847 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.667865 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.770531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.770579 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.770590 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.770609 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.770622 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.873744 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.873799 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.873811 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.873830 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.873844 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.943496 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/0.log" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.943546 4797 generic.go:334] "Generic (PLEG): container finished" podID="91fac858-36ec-4a4b-ba0d-014f6b96b421" containerID="f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b" exitCode=1 Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.943580 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerDied","Data":"f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.943925 4797 scope.go:117] "RemoveContainer" containerID="f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.960464 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.976854 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.977260 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.977279 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.977303 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.977322 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.977402 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:28 crc kubenswrapper[4797]: I0104 11:49:28.994059 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.014133 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.034255 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.074788 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.085444 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.085482 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.085490 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.085505 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.085513 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.090960 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.109134 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.121231 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.133966 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.146656 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.163677 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.175636 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.187684 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.187737 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.187755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.187778 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.187796 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.191812 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.213187 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.227157 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.242017 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.290376 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.290417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.290426 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.290440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.290450 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.393063 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.393101 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.393110 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.393125 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.393134 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.473330 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:29 crc kubenswrapper[4797]: E0104 11:49:29.473457 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.473287 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:29 crc kubenswrapper[4797]: E0104 11:49:29.474078 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.495324 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.495405 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.495426 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.495457 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.495476 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.598136 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.598202 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.598221 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.598249 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.598270 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.700534 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.700648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.700674 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.700703 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.700731 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.803962 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.804048 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.804065 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.804089 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.804107 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.907363 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.907394 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.907403 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.907417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.907428 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.950392 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/0.log" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.950487 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerStarted","Data":"65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f"} Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.971439 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[4797]: I0104 11:49:29.998704 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.012536 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.012614 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.012634 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.012663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.012685 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.017398 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.036546 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.053895 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" 
for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.072317 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.094063 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.109320 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.115419 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.115496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.115519 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.115549 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.115570 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.125899 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.142372 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.157158 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.178205 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.194713 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.211890 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.217816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.217858 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.217866 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.217884 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.217894 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.227732 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.250764 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.265959 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.320648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.320698 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.320712 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.320732 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.320745 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.423544 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.423593 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.423605 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.423623 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.423635 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.473293 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.473319 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:30 crc kubenswrapper[4797]: E0104 11:49:30.473408 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:30 crc kubenswrapper[4797]: E0104 11:49:30.473519 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.526305 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.526364 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.526379 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.526402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.526417 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.629460 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.629518 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.629527 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.629548 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.629563 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.732519 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.732577 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.732594 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.732617 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.732632 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.836221 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.836279 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.836295 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.836321 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.836340 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.939445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.939494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.939506 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.939523 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[4797]: I0104 11:49:30.939536 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.041627 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.041692 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.041710 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.041734 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.041751 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.144367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.144409 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.144418 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.144434 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.144447 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.247339 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.247398 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.247410 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.247434 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.247449 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.350124 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.350191 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.350211 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.350237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.350255 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.453351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.453399 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.453414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.453434 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.453467 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.473879 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:31 crc kubenswrapper[4797]: E0104 11:49:31.474038 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.474153 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:31 crc kubenswrapper[4797]: E0104 11:49:31.474377 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.485498 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.498581 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.510099 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.523940 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.536564 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.548927 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.555803 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.555862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.555877 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.555899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.555916 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.563277 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.575374 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.584376 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.597509 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.606826 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.617068 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.625577 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.635763 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.649317 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.660612 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.660650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.660663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.660684 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.660841 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.662289 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.682409 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.762845 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.762891 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.762902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.762919 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.762931 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.865289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.865356 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.865377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.865403 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.865421 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.967686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.967721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.967731 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.967746 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:31 crc kubenswrapper[4797]: I0104 11:49:31.967758 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.071492 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.071540 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.071552 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.071570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.071584 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.174592 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.174641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.174653 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.174670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.174682 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.276658 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.276702 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.276714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.276730 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.276740 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.379466 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.379552 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.379575 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.379600 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.379618 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.473977 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.474129 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:32 crc kubenswrapper[4797]: E0104 11:49:32.474176 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:32 crc kubenswrapper[4797]: E0104 11:49:32.474461 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.482166 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.482233 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.482250 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.482275 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.482293 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.585412 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.585477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.585496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.585521 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.585538 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.688414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.688437 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.688445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.688458 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.688467 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.790634 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.790662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.790670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.790683 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.790693 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.893098 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.893169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.893181 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.893215 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.893235 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.995801 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.995852 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.995863 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.995889 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:32 crc kubenswrapper[4797]: I0104 11:49:32.995900 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.098632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.098686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.098700 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.098721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.098734 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.201338 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.201416 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.201435 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.201467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.201488 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.304877 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.304940 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.304957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.305029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.305070 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.408041 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.408078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.408087 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.408120 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.408133 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.473537 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.473584 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:33 crc kubenswrapper[4797]: E0104 11:49:33.473668 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:33 crc kubenswrapper[4797]: E0104 11:49:33.473927 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.511334 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.511402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.511421 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.511445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.511462 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.613895 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.613951 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.613970 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.614025 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.614046 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.716722 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.716796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.716820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.716847 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.716869 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.819808 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.819847 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.819856 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.819875 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.819884 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.922592 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.922627 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.922640 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.922654 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:33 crc kubenswrapper[4797]: I0104 11:49:33.922663 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.024544 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.024587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.024596 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.024610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.024620 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.132154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.132210 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.132222 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.132241 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.132253 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.234788 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.234844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.234861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.234884 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.234906 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.291943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.292036 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.292054 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.292078 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.292096 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.306935 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.311124 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.311173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.311190 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.311213 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.311230 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.324831 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.328442 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.328473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.328481 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.328496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.328507 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.344584 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.355236 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.355268 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.355279 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.355292 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.355305 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.372709 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.376539 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.376564 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.376573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.376585 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.376592 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.393074 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.393211 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.394671 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.394692 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.394700 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.394711 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.394719 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.473504 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.473528 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.473630 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:34 crc kubenswrapper[4797]: E0104 11:49:34.473710 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.496588 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.496633 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.496648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.496664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.496674 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.599293 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.599351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.599368 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.599391 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.599408 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.702947 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.703030 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.703048 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.703068 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.703085 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.806252 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.806310 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.806328 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.806351 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.806427 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.908865 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.908924 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.908942 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.908966 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[4797]: I0104 11:49:34.909013 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.011830 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.011883 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.011899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.011923 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.011939 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.114908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.114965 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.115012 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.115037 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.115056 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.217548 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.217577 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.217588 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.217601 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.217611 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.320298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.320338 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.320347 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.320361 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.320371 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.423313 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.423372 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.423389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.423413 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.423432 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.473474 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.473628 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:35 crc kubenswrapper[4797]: E0104 11:49:35.473885 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:35 crc kubenswrapper[4797]: E0104 11:49:35.474839 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.525878 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.525952 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.525969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.526026 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.526051 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.630107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.630156 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.630172 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.630195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.630212 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.732652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.732708 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.732718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.732734 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.732745 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.835668 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.835751 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.835777 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.835807 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.835831 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.939689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.939777 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.939795 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.939820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[4797]: I0104 11:49:35.939838 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.043025 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.043091 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.043109 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.043133 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.043152 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.146485 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.146566 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.146596 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.146627 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.146648 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.250549 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.250608 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.250626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.250650 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.250667 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.353370 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.353455 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.353472 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.353494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.353513 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.457054 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.457116 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.457133 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.457158 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.457176 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.473593 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:36 crc kubenswrapper[4797]: E0104 11:49:36.473767 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.474026 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:36 crc kubenswrapper[4797]: E0104 11:49:36.474138 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.562184 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.562253 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.562276 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.562308 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.562332 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.666570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.666647 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.666664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.666690 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.666707 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.770866 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.770899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.770910 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.770925 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.770935 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.877437 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.877508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.877529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.877557 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.877584 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.980377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.980438 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.980456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.980481 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:36 crc kubenswrapper[4797]: I0104 11:49:36.980498 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:36Z","lastTransitionTime":"2026-01-04T11:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.083690 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.083749 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.083767 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.083794 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.083812 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.186273 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.186428 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.186448 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.186472 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.186494 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.289325 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.290097 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.290129 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.290157 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.290176 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.392423 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.392496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.392531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.392553 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.392565 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.473419 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.473429 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:37 crc kubenswrapper[4797]: E0104 11:49:37.473607 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:37 crc kubenswrapper[4797]: E0104 11:49:37.473839 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.495146 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.495207 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.495228 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.495252 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.495270 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.598389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.598470 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.598490 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.598514 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.598534 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.701282 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.701335 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.701354 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.701377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.701394 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.804968 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.805060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.805077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.805113 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.805150 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.908433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.908562 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.908589 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.908614 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:37 crc kubenswrapper[4797]: I0104 11:49:37.908631 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:37Z","lastTransitionTime":"2026-01-04T11:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.011717 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.011765 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.011782 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.011807 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.011825 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:38Z","lastTransitionTime":"2026-01-04T11:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.473826 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:38 crc kubenswrapper[4797]: I0104 11:49:38.473884 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:38 crc kubenswrapper[4797]: E0104 11:49:38.474115 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:38 crc kubenswrapper[4797]: E0104 11:49:38.474239 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.042380 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.042461 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.042479 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.042505 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.042523 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.473211 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.473675 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:39 crc kubenswrapper[4797]: E0104 11:49:39.473847 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:39 crc kubenswrapper[4797]: E0104 11:49:39.474128 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:39 crc kubenswrapper[4797]: I0104 11:49:39.474285 4797 scope.go:117] "RemoveContainer" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.082117 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.082183 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.082201 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.082229 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.082247 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.473843 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.473907 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:40 crc kubenswrapper[4797]: E0104 11:49:40.474076 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:40 crc kubenswrapper[4797]: E0104 11:49:40.474215 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.992453 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/2.log"
Jan 04 11:49:40 crc kubenswrapper[4797]: I0104 11:49:40.997086 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d"}
Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.421641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.421777 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.421796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.421827 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.421849 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.473663 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.473738 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:41 crc kubenswrapper[4797]: E0104 11:49:41.473839 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:41 crc kubenswrapper[4797]: E0104 11:49:41.474092 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.493570 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.522232 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.525190 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.525248 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.525271 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.525299 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.525323 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.542685 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.565530 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.584208 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.603823 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.621245 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.629162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.629477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.629685 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.629973 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.630149 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.638395 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2a
f0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.658107 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.680809 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.699787 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.720622 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.733258 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.733300 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.733312 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.733332 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.733349 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.744526 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe41
62a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.777443 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.806905 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.836215 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.836255 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.836266 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.836283 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.836296 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.852374 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z 
[verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.872148 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.938017 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.938069 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.938086 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.938107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:41 crc kubenswrapper[4797]: I0104 11:49:41.938124 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.001071 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.021853 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-a
ccess-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.037191 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.044372 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.044407 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.044418 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.044433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.044445 4797 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.064703 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.1
1\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.090125 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.118884 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.131269 4797 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.142836 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.146403 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.146445 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.146457 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.146476 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.146489 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.167397 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.187187 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.200793 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 
2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.216810 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.226961 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.241817 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.248720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.248796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 
crc kubenswrapper[4797]: I0104 11:49:42.248816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.248844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.248867 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.255938 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.270866 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.294675 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.308152 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.351422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.351537 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.351558 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.351585 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.351605 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.454425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.454477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.454494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.454546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.454564 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.473194 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.473249 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:42 crc kubenswrapper[4797]: E0104 11:49:42.473359 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:42 crc kubenswrapper[4797]: E0104 11:49:42.473553 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.558026 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.558090 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.558108 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.558135 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.558151 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.661571 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.661618 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.661635 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.661657 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.661672 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.765131 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.765195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.765213 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.765237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.765256 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.868336 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.868406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.868429 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.868462 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.868488 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.971965 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.972037 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.972049 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.972069 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:42 crc kubenswrapper[4797]: I0104 11:49:42.972085 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.006919 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/3.log" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.007900 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/2.log" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.012384 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d" exitCode=1 Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.012441 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.012504 4797 scope.go:117] "RemoveContainer" containerID="2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.013619 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.014010 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.041480 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.063951 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 
2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.075126 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.075200 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.075226 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.075258 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.075278 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.081227 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.112297 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ada9e3dcf4b46405b076f3aab539ccf4d7809f7c26da54541c5e19db385fd35\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:12Z\\\",\\\"message\\\":\\\".AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:49:12.499904 6405 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:49:12.499921 6405 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:12.499931 6405 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:12.500046 6405 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:12.500067 6405 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:12.500129 6405 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:12.500248 6405 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:49:12.500261 6405 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:49:12.500310 6405 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:12.500386 6405 factory.go:656] Stopping watch factory\\\\nI0104 11:49:12.500401 6405 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:12.500410 6405 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:12.500419 6405 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:49:12.500428 6405 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:42Z\\\",\\\"message\\\":\\\"309257 6798 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:49:42.309298 6798 handler.go:190] Sending *v1.EgressFirewall event handler 9 for 
removal\\\\nI0104 11:49:42.309332 6798 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:49:42.309345 6798 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:49:42.309365 6798 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:42.309368 6798 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:42.309374 6798 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:42.309375 6798 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:42.309383 6798 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:49:42.309391 6798 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:42.309396 6798 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:42.309400 6798 factory.go:656] Stopping watch factory\\\\nI0104 11:49:42.309417 6798 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:49:42.309449 6798 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:42.309471 6798 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:49:42.309565 6798 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\
\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.130877 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.150311 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.170749 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.178626 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.178697 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.178722 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.178911 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.178935 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.192688 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.212421 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.232322 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.248344 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.266690 4797 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.283211 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.283275 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.283298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.283333 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.283371 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.284459 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.306493 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.325310 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.340707 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.360926 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:43Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.386231 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.386288 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc 
kubenswrapper[4797]: I0104 11:49:43.386305 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.386329 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.386347 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.473980 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.474109 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.474236 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.474350 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.488093 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.488230 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488269 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:47.488236229 +0000 UTC m=+146.345422978 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.488324 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488372 4797 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488445 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:47.488422364 +0000 UTC m=+146.345609103 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488465 4797 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.488379 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488535 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:47.488519287 +0000 UTC m=+146.345706166 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488581 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.488599 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488613 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488743 4797 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488796 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488833 4797 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488851 4797 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488802 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:47.488784084 +0000 UTC m=+146.345970823 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:43 crc kubenswrapper[4797]: E0104 11:49:43.488924 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-01-04 11:50:47.488903967 +0000 UTC m=+146.346090716 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.489508 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.489561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.489585 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.489613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.489635 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.591881 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.591939 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.591956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.591982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.592028 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.695136 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.695209 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.695232 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.695264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.695291 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.798974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.799144 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.799168 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.799198 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.799223 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.902404 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.902454 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.902472 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.902494 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:43 crc kubenswrapper[4797]: I0104 11:49:43.902511 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.005107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.005161 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.005177 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.005198 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.005213 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.019519 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/3.log" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.024441 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d" Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.024655 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.041678 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.061114 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 
2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.081584 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.108081 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.108139 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.108155 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.108178 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.108195 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.115572 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:42Z\\\",\\\"message\\\":\\\"309257 6798 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:49:42.309298 6798 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:42.309332 6798 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:49:42.309345 6798 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:49:42.309365 6798 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:42.309368 6798 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:42.309374 6798 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:42.309375 6798 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:42.309383 6798 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:49:42.309391 6798 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:42.309396 6798 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:42.309400 6798 factory.go:656] Stopping watch factory\\\\nI0104 11:49:42.309417 6798 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:49:42.309449 6798 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:42.309471 6798 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:49:42.309565 6798 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.130951 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.148073 4797 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.164263 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.180566 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.194982 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.210681 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.210730 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.210757 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.210780 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.210794 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.215359 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.231733 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.246338 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.265278 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.281719 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.296294 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.314099 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.314154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.314169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.314228 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.314246 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.320603 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe41
62a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.335718 4797 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.417504 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.417567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.417616 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.417641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.417660 4797 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.473447 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.473464 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.473654 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.473795 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.487666 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.521611 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.521675 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.521700 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.521732 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.521756 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.624945 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.625049 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.625067 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.625094 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.625111 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.728107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.728171 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.728188 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.728212 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.728230 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.735856 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.735917 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.735943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.735976 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.736039 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.757547 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.762621 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.762670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.762694 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.762724 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.762747 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.785668 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.791245 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.791311 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
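Every failed status patch in this stretch carries the same terminal cause: the kubelet cannot call the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because its serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-04. A minimal Go sketch to confirm the certificate window from the node itself; the endpoint is taken from the log, everything else is illustrative and not part of this log:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify is deliberate: the certificate is expired, and we
	// only want to read its validity window, not trust the connection.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook endpoint: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:  %s\n", cert.Subject)
	fmt.Printf("notAfter: %s\n", cert.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired:  %v\n", time.Now().After(cert.NotAfter))
}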
event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.791328 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.791353 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.791373 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.813943 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.818646 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.818711 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
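Because each retry re-logs the same multi-kilobyte patch payload, a small filter makes the retry cadence easier to audit than reading the raw records. A sketch, assuming this file is readable as kubelet.log in the working directory (the filename and regex are illustrative):

package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"regexp"
	"strings"
)

// Matches the x509 clause at the tail of each failed patch record.
var cause = regexp.MustCompile(`current time (\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z) is after (\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)`)

func main() {
	f, err := os.Open("kubelet.log") // assumed path to this log
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	// Status-patch records run to several KB; raise the default 64 KB line cap.
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20)

	count := 0
	summary := ""
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "Error updating node status, will retry") {
			continue
		}
		count++
		if m := cause.FindStringSubmatch(line); m != nil {
			summary = fmt.Sprintf("node clock %s is past certificate notAfter %s", m[1], m[2])
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d failed node status patches; cause: %s\n", count, summary)
}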
event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.818728 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.818753 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.818771 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.838793 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.844154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.844220 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.844243 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.844273 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.844293 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.865602 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[4797]: E0104 11:49:44.865919 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.868726 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.868801 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.868818 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.868845 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.868863 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.971731 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.971773 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.971791 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.971809 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[4797]: I0104 11:49:44.971825 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.074062 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.074133 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.074149 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.074177 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.074195 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.177763 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.177841 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.177864 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.177893 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.177915 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.281385 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.281446 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.281463 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.281490 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.281512 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.384820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.384859 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.384877 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.384900 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.384918 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.473764 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.473870 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:45 crc kubenswrapper[4797]: E0104 11:49:45.474018 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:45 crc kubenswrapper[4797]: E0104 11:49:45.474125 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.486902 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.486949 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.486966 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.487018 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.487036 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.590149 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.590559 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.590584 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.590611 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.590632 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.694404 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.694542 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.694562 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.694587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.694604 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.797335 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.797411 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.797430 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.797456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.797474 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.900779 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.900946 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.900969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.901022 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[4797]: I0104 11:49:45.901040 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.005036 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.005107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.005128 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.005154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.005173 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.107908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.108024 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.108050 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.108079 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.108102 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.211072 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.211173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.211194 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.211218 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.211239 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.313474 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.313556 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.313575 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.313604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.313624 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.416784 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.416852 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.416870 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.416896 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.416927 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.473340 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.473395 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:46 crc kubenswrapper[4797]: E0104 11:49:46.473541 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:46 crc kubenswrapper[4797]: E0104 11:49:46.473753 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.520815 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.520896 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.520920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.520952 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.520978 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.623561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.623619 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.623638 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.623662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.623680 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.727227 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.727288 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.727305 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.727327 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.727344 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.830295 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.830350 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.830367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.830391 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.830408 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.934217 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.934273 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.934290 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.934313 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[4797]: I0104 11:49:46.934330 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.040395 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.040983 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.041231 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.041416 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.041571 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.146134 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.146204 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.146224 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.146249 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.146267 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.249520 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.249567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.249587 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.249613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.249631 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.352636 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.352682 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.352698 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.352718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.352734 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.456227 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.456667 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.456927 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.457432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.457826 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.473765 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.474099 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:47 crc kubenswrapper[4797]: E0104 11:49:47.474774 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:47 crc kubenswrapper[4797]: E0104 11:49:47.474254 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.561095 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.561147 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.561164 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.561188 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.561205 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.664659 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.664723 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.664740 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.664764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.664782 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.767718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.767797 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.767821 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.767850 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.767872 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.870930 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.871033 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.871060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.871088 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.871109 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.973622 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.973680 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.973703 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.973730 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[4797]: I0104 11:49:47.973751 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.076543 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.076628 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.076663 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.076695 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.076718 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.180281 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.180342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.180367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.180395 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.180415 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.283813 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.283877 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.283894 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.283923 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.283941 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.386681 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.386765 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.386790 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.386823 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.386847 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.473359 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.473373 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:48 crc kubenswrapper[4797]: E0104 11:49:48.473563 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:48 crc kubenswrapper[4797]: E0104 11:49:48.473775 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.490106 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.490165 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.490184 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.490207 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.490226 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.593589 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.593653 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.593670 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.593699 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.593715 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.696888 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.697807 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.697975 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.698185 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.698334 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.801220 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.801568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.801751 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.801910 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.802112 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.905354 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.905414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.905432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.905457 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[4797]: I0104 11:49:48.905475 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.008233 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.008296 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.008314 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.008342 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.008365 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.110968 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.111077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.111094 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.111118 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.111136 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.213930 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.214035 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.214055 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.214080 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.214098 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.317281 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.317352 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.317370 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.317395 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.317413 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.420875 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.420944 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.420963 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.421024 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.421043 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.473818 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:49 crc kubenswrapper[4797]: E0104 11:49:49.474068 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.474186 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:49 crc kubenswrapper[4797]: E0104 11:49:49.474391 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.524875 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.524939 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.524956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.524982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.525038 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.628358 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.628425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.628442 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.628467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.628484 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.732440 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.732487 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.732506 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.732529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.732547 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.835592 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.835635 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.835646 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.835662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.835673 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.939207 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.939270 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.939288 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.939317 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[4797]: I0104 11:49:49.939334 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.042063 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.042127 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.042145 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.042170 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.042188 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.145578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.145643 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.145668 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.145711 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.145736 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.248762 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.248908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.248931 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.248956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.249036 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.351641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.351678 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.351689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.351704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.351715 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.454604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.454662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.454675 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.454697 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.454736 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.473512 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.473715 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:50 crc kubenswrapper[4797]: E0104 11:49:50.473877 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:50 crc kubenswrapper[4797]: E0104 11:49:50.474068 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.557558 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.557971 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.558249 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.558469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.558779 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.663237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.663284 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.663302 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.663329 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.663347 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.768331 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.768403 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.768426 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.768456 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.768477 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.871721 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.871968 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.872014 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.872042 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.872060 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.980781 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.980872 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.980885 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.980905 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[4797]: I0104 11:49:50.980956 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.083513 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.083566 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.083584 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.083608 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.083625 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.186154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.186190 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.186202 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.186220 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.186233 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.289596 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.289653 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.289666 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.289689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.289705 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.392366 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.392433 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.392449 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.392470 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.392487 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.473624 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:51 crc kubenswrapper[4797]: E0104 11:49:51.473772 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.473857 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:51 crc kubenswrapper[4797]: E0104 11:49:51.474149 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496042 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"763698b3-a4e4-4688-a62e-73e141ea3ec1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f620dc8276a223643e71885447fb5a4627b98c7a612103bba1ee2dc913dac493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cacbd556a4b810db08ded0493bbdcfc60829446cc97a0041a47277a33710d47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa06d09d0f64de658cf52e2d54f0dcde622864ba94573e3cd62a73cf075bf5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0222ae077c2de9f5097982b8951fb91d5da0f3bab4688bb8626befeb07b3baaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496399 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496491 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496516 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496552 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.496576 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.517401 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad07634b3ac878347efb1578d3d7b3a806879dacb610e19ed5464307b3ba605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.541564 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xwctk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91fac858-36ec-4a4b-ba0d-014f6b96b421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:28Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf\\\\n2026-01-04T11:48:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_36ad49b5-5d72-42f4-90d6-a16eb68fb1bf to /host/opt/cni/bin/\\\\n2026-01-04T11:48:43Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:43Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gczkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xwctk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.588256 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b765f232-404c-4b96-8190-376d4104facc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:42Z\\\",\\\"message\\\":\\\"309257 6798 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:49:42.309298 6798 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:49:42.309332 6798 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0104 11:49:42.309345 6798 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:49:42.309365 6798 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:49:42.309368 6798 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:49:42.309374 6798 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:49:42.309375 6798 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:49:42.309383 6798 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:49:42.309391 6798 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:49:42.309396 6798 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:49:42.309400 6798 factory.go:656] Stopping watch factory\\\\nI0104 11:49:42.309417 6798 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:49:42.309449 6798 handler.go:208] Removed *v1.Node event handler 7\\\\nI0104 11:49:42.309471 6798 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:49:42.309565 6798 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62nns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-thvnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.599957 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.600059 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.600080 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.600106 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.600125 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.608933 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f279bbd-812a-4617-b821-852c35954cb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac3b09bf892c9e8521755aa938d57dc47a4c84d5e3c6fdf99b9ca19fe3facd7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mkzss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2dbq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.626386 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af260a22-88fa-4b83-b3c1-c45b5b8077fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62e4ba103f65b5553aadfc5704a47c082736d3c8c105d3af379e628146172be8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d028001ecc3a35ef3a28af9a5d2a92ccad372f03d31b16c87ee6f45377fb223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d028001ecc3a35ef3a28af9a5d2a92ccad372f03d31b16c87ee6f45377fb223\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.648421 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e454306-c21c-4500-ba20-791456b29fba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39af7cbee6a5c1a6ce794a8cfe73cc4200cae7f627083457a3873ecf0dbbb1d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1f869085a9da97655eff04015508dfb6fa9fb80836780b3236fc8a4abf73e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\
"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5fd72e88b88f3ce14e09fdb0464938b2cf352972f3f3d7720d8d62322655903\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.669261 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.690408 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3519e3f389e04fcd0ddc9bee7fd1680dd7ba834369d3a9fc2535d47b4d8dc241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://042112178ebf23070755fede67537c8efd643dcbaefa17f432aa16c288095ffe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.703260 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.703359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.703384 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.703458 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.703480 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.709653 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.730694 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.749875 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c264f05-2fcc-422e-a717-d766b27bfd5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvn4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-v8lzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.773571 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88b14f35-7706-4b2d-91bb-da0f0635076c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:23Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.796370 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://499a29c9cf22ca8936fae4433cdac05e1c7575f901db478ac1833796e71bf616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.806853 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.806909 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.806931 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.806956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.806974 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.814453 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fl747" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd7fc2a-4958-4faf-b6fc-fb75c0553d7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9480a1eae305a4be7d6ec770d885c9dacc2dcb05023322f709bd9f2ed47752c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76c57\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fl747\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.842779 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3287e642-dc09-4bbe-91c4-02904aa821de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6095fe007b237aa2b351454a46d533a572c7d421e084c1715289fbe87ff64be9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f305d773becd94228b98634a2228d048d4cd05bf876a4eab5956573f88f5df9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d1afe4162a46dafa00d47a5c519e8e3edf341465e743fac0fe8aaef7da05e5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36bba975898644dcb4d86952ac2daf299e3501c53ac62d72ac8312993b25d5b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2bd580ce4915e4bb8e3718b70f1c183cb15b5e0ef4727fa72c36c69f06e84bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45509b55414b7df34ac5d929e81c2e683fcf6f76efee2f740016f96d9eead787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a31c1ea936b689785dfc8445da916ec5206a6466976e0f73c9129c622976382\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pvzql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kcsbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.860458 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-m5fj2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a83ea4c4-a12c-4ad5-868e-cc0f09576858\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f480c8cbbef806689513511e4667554322e59fa9636273936820beba635a958\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2gwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-m5fj2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.878507 4797 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8f4bf89-d69a-4f1e-b0e0-464a906d8c02\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f7370a27ae836ba63d1fd8fa1a22a38c6d51eee2d5611372a2ff7507dd7a8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67fc7cb5d39c2265b02deefd7972c810a46a9bcc897a4308370550ee7baaedda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9dwrk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fptjv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:51Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.910319 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.910379 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.910397 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.910422 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:51 crc kubenswrapper[4797]: I0104 11:49:51.910441 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.013639 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.013695 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.013714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.013738 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.013755 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.116771 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.116838 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.116861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.116893 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.116916 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
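The condition={...} payload that setters.go:603 logs above is the JSON form of a node Ready condition. A minimal sketch for decoding one of these payloads; the struct below is a hand-trimmed mirror of the core/v1 NodeCondition shape (an illustrative assumption, not imported from k8s.io/api):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors only the fields visible in the log entries above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied from one of the "Node became not ready" entries above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}
```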
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.220358 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.220457 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.220477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.220501 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.220518 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.322956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.323048 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.323074 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.323104 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.323126 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.426502 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.426575 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.426599 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.426624 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.426642 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
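The pod-status patch failures earlier in this log all fail on the same x509 error: the serving certificate of the network-node-identity webhook expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-04. A minimal sketch, assuming the webhook endpoint from the log (https://127.0.0.1:9743) is reachable from the node, that fetches the served certificate despite the expiry and prints its validity window:

```go
package main

import (
	"crypto/tls"
	"fmt"
)

func main() {
	// InsecureSkipVerify is deliberate here: verification is exactly what
	// fails in the log above, and we want to inspect the certificate anyway.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	// First peer certificate is the leaf the webhook serves.
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
		cert.Subject, cert.NotBefore.UTC(), cert.NotAfter.UTC())
}
```

If the printed notAfter matches the 2025-08-24T17:21:41Z deadline in the errors above, the webhook certificate (not the kubelet's client config) is what needs rotating.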
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.473191 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.473257 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:52 crc kubenswrapper[4797]: E0104 11:49:52.473364 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:52 crc kubenswrapper[4797]: E0104 11:49:52.473482 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.530441 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.531332 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.531767 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.531796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.531816 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.636463 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.636540 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.636558 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.636585 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.636603 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.740308 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.740359 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.740376 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.740401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.740420 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.847676 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.847741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.847764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.847794 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.847819 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.950553 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.950613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.950630 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.950651 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[4797]: I0104 11:49:52.950667 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.053913 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.053978 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.054029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.054057 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.054075 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.157414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.157517 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.157535 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.157559 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.157585 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.260777 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.260843 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.260860 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.260884 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.260901 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.363959 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.364071 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.364095 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.364123 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.364146 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.466481 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.466529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.466546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.466567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.466583 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.473337 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.473365 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:53 crc kubenswrapper[4797]: E0104 11:49:53.473564 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:49:53 crc kubenswrapper[4797]: E0104 11:49:53.473659 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.569108 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.569162 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.569180 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.569201 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.569218 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.672099 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.672173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.672206 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.672262 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.672283 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.775347 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.775417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.775444 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.775473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.775494 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.878537 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.878588 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.878603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.878629 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.878651 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.981598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.981689 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.981715 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.981746 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[4797]: I0104 11:49:53.981766 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.084862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.084917 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.084935 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.084958 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.084977 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.187862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.187903 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.187920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.187942 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.187959 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.291868 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.291935 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.291953 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.291980 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.292023 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.395160 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.395246 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.395270 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.395301 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.395326 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.474142 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.474192 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:54 crc kubenswrapper[4797]: E0104 11:49:54.474326 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
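Every NetworkReady=false and "Error syncing pod" entry above bottoms out in the same check: no CNI network configuration was found in /etc/kubernetes/cni/net.d/. A minimal sketch that mirrors that check by listing config files in the directory the kubelet names; the extension set (.conf, .conflist, .json) follows common CNI config-discovery behavior and is an assumption here:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the log above
	entries, err := os.ReadDir(dir)
	if err != nil {
		panic(err)
	}
	found := 0
	for _, e := range entries {
		// Count the file types CNI config discovery typically accepts.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration files found; NetworkReady stays false")
	}
}
```

An empty directory here is consistent with the log: the network operator's pods cannot be admitted (the webhook certificate is expired), so the CNI config that would flip the node to Ready never gets written.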
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:54 crc kubenswrapper[4797]: E0104 11:49:54.474472 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.499029 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.499085 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.499110 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.499141 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.499165 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.602515 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.602559 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.602575 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.602597 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.602613 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.705377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.705412 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.705429 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.705452 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.705469 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.808567 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.808613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.808633 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.808690 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.808708 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.911789 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.911851 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.911873 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.911897 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[4797]: I0104 11:49:54.911913 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.015324 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.015383 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.015401 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.015425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.015444 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.118834 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.118897 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.118923 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.118951 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.118973 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.125749 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.125803 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.125825 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.125850 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.125870 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.146818 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.152544 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.152604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.152625 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.152655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.152677 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.173112 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.178425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.178473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.178490 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.178511 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.178528 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.199980 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.205054 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.205107 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.205126 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.205149 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.205166 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.225823 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.231572 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.231632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.231655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.231682 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.231705 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.253663 4797 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e6c2e325-522a-4f6c-bbaa-70b27798188f\\\",\\\"systemUUID\\\":\\\"b11c24b9-47b3-405e-94d5-79769a53822b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.253884 4797 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.256341 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.256390 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.256406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.256431 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.256479 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.360475 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.360592 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.360622 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.360652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.360673 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.463916 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.463982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.464040 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.464069 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.464089 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.474156 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.474278 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.474760 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:55 crc kubenswrapper[4797]: E0104 11:49:55.474745 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.566835 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.566899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.566938 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.566969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.567764 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.670677 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.670725 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.670741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.670764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.670781 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.774316 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.774377 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.774396 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.774418 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.774439 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.876833 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.876903 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.876926 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.876956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.876981 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.979926 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.980024 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.980044 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.980067 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:55 crc kubenswrapper[4797]: I0104 11:49:55.980086 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.083336 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.083402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.083425 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.083454 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.083476 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.186610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.186696 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.186720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.186750 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.186772 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.289662 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.289737 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.289758 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.289779 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.289796 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.392755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.392822 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.392839 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.392864 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.392881 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.473906 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.473970 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:56 crc kubenswrapper[4797]: E0104 11:49:56.474171 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:56 crc kubenswrapper[4797]: E0104 11:49:56.474367 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.496275 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.496366 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.496389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.496414 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.496429 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.599704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.599742 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.599755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.599775 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.599788 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.703641 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.703710 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.703727 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.703793 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.703813 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.806610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.806679 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.806696 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.806720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.806738 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.909632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.909688 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.909704 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.909727 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:56 crc kubenswrapper[4797]: I0104 11:49:56.909744 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.012974 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.013060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.013077 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.013100 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.013128 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.116224 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.116302 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.116327 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.116358 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.116384 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.219172 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.219224 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.219237 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.219259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.219272 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.321796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.321822 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.321831 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.321845 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.321853 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.425211 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.425278 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.425298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.425322 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.425338 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.473543 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:57 crc kubenswrapper[4797]: E0104 11:49:57.473711 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.473754 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:57 crc kubenswrapper[4797]: E0104 11:49:57.474399 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.474847 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d"
Jan 04 11:49:57 crc kubenswrapper[4797]: E0104 11:49:57.475138 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.528669 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.528727 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.528746 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.528772 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.528789 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.632597 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.632686 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.632709 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.632740 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.632766 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
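The scope.go "RemoveContainer" record followed by the CrashLoopBackOff error above shows the kubelet's restart throttling for ovnkube-controller: the dead container is removed, but the next start is delayed by a back-off that has already reached 40s. Below is a rough model of that behavior, assuming the commonly documented kubelet defaults of a 10s initial delay that doubles per consecutive crash up to a 5m cap; the constants are assumptions, only the 40s figure comes from this log.

package main

// Rough model of the kubelet's per-container crash-loop back-off: an
// exponential delay that doubles per restart and saturates at a cap.
// The 10s base and 5m cap are assumed defaults, not values read from
// this log; this is an illustration, not kubelet's implementation.
import (
	"fmt"
	"time"
)

func crashLoopDelay(restarts int, base, max time.Duration) time.Duration {
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> back-off %v\n", r, crashLoopDelay(r, 10*time.Second, 5*time.Minute))
	}
	// restart 2 -> back-off 40s, matching the "back-off 40s restarting
	// failed container=ovnkube-controller" message in the record above.
}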
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.735492 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.735547 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.735570 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.735599 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.735620 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.838519 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.838573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.838598 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.838627 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.838647 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.942076 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.942137 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.942154 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.942178 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:57 crc kubenswrapper[4797]: I0104 11:49:57.942198 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.045956 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.046060 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.046083 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.046116 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.046140 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.149853 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.149924 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.149953 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.149983 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.150034 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.252821 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.252879 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.252897 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.252927 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.252944 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.355896 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.355961 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.355979 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.356031 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.356051 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.458977 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.459074 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.459096 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.459122 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.459140 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.473913 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.473947 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:58 crc kubenswrapper[4797]: E0104 11:49:58.474119 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:58 crc kubenswrapper[4797]: E0104 11:49:58.474257 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.563389 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.563450 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.563467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.563492 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.563509 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.666977 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.667100 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.667123 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.667496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.667518 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.770297 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.770352 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.770368 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.770390 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.770409 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.873402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.873452 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.873469 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.873491 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.873508 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.976713 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.976818 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.976851 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.976873 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:58 crc kubenswrapper[4797]: I0104 11:49:58.976889 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.079544 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.079604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.079622 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.079664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.079683 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.182467 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.182532 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.182552 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.182578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.182633 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.286102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.286173 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.286200 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.286233 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.286257 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.389390 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.389454 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.389473 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.389497 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.389528 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.473979 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.474582 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:59 crc kubenswrapper[4797]: E0104 11:49:59.474967 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b"
Jan 04 11:49:59 crc kubenswrapper[4797]: E0104 11:49:59.475268 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.492862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.493198 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.493371 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.493506 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.493657 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.597476 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.597531 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.597548 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.597573 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.597591 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.700757 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.700814 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.700831 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.700858 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.700876 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.804056 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.804148 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.804169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.804197 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.804217 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.883966 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:49:59 crc kubenswrapper[4797]: E0104 11:49:59.884324 4797 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:49:59 crc kubenswrapper[4797]: E0104 11:49:59.884433 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs podName:4c264f05-2fcc-422e-a717-d766b27bfd5b nodeName:}" failed. No retries permitted until 2026-01-04 11:51:03.884400067 +0000 UTC m=+162.741586836 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs") pod "network-metrics-daemon-v8lzg" (UID: "4c264f05-2fcc-422e-a717-d766b27bfd5b") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.906708 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.906755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.906771 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.906795 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:59 crc kubenswrapper[4797]: I0104 11:49:59.906812 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.010240 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.010287 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.010308 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.010328 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.010345 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
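The MountVolume failure above is a different symptom from the CNI errors: object "openshift-multus"/"metrics-daemon-secret" not registered means the kubelet's secret manager has no registration for that object yet, so the mount cannot even be attempted, and nestedpendingoperations has already grown the retry delay to 1m4s (no retry before 11:51:03). That 64s figure is consistent with a delay that starts small and doubles per failure; the sketch below reproduces such a schedule under the assumption of a 500 ms initial delay, which is an assumption for illustration, not a value read from this log.

package main

// Sketch of a doubling retry schedule consistent with the logged
// "durationBeforeRetry 1m4s": starting from an assumed 500ms delay and
// doubling on each failure, the 8th consecutive failure waits 64s (1m4s).
import (
	"fmt"
	"time"
)

func main() {
	d := 500 * time.Millisecond
	for attempt := 1; d <= 64*time.Second; attempt++ {
		fmt.Printf("failure %d -> wait %v before retry\n", attempt, d)
		d *= 2
	}
}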
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.113179 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.113229 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.113248 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.113274 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.113293 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.216292 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.216354 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.216371 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.216395 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.216415 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.319853 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.319920 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.319941 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.319967 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.320028 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.422718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.422796 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.422820 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.422848 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.422869 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.473756 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:00 crc kubenswrapper[4797]: E0104 11:50:00.474438 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.476422 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:00 crc kubenswrapper[4797]: E0104 11:50:00.480193 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.499258 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.525512 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.525632 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.525661 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.525693 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.525718 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.629207 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.629258 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.629278 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.629300 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.629318 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.732095 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.732505 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.732714 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.732929 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.733180 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.836528 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.836586 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.836608 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.836635 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.836655 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.939776 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.939864 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.939879 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.939905 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[4797]: I0104 11:50:00.939922 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.043138 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.043202 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.043221 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.043246 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.043266 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.146539 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.146594 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.146613 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.146637 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.146655 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.249872 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.249915 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.249925 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.249943 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.249954 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.353764 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.353862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.353880 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.353905 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.353924 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.457830 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.457904 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.457921 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.457982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.458035 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.473319 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.473434 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:01 crc kubenswrapper[4797]: E0104 11:50:01.473482 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:01 crc kubenswrapper[4797]: E0104 11:50:01.473955 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.527567 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=1.5275352070000001 podStartE2EDuration="1.527535207s" podCreationTimestamp="2026-01-04 11:50:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.524746844 +0000 UTC m=+100.381933593" watchObservedRunningTime="2026-01-04 11:50:01.527535207 +0000 UTC m=+100.384721956" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.548882 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podStartSLOduration=80.548831232 podStartE2EDuration="1m20.548831232s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.548705299 +0000 UTC m=+100.405892038" watchObservedRunningTime="2026-01-04 11:50:01.548831232 +0000 UTC m=+100.406017981" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.563665 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.563731 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.563748 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.563772 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.563792 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.667031 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.667105 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.667118 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.667136 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.667159 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.670963 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.670948397 podStartE2EDuration="17.670948397s" podCreationTimestamp="2026-01-04 11:49:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.670554537 +0000 UTC m=+100.527741296" watchObservedRunningTime="2026-01-04 11:50:01.670948397 +0000 UTC m=+100.528135116" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.714664 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=77.714646417 podStartE2EDuration="1m17.714646417s" podCreationTimestamp="2026-01-04 11:48:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.698545027 +0000 UTC m=+100.555731726" watchObservedRunningTime="2026-01-04 11:50:01.714646417 +0000 UTC m=+100.571833116" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.735679 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-fl747" podStartSLOduration=80.735658385 podStartE2EDuration="1m20.735658385s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.715035537 +0000 UTC m=+100.572222246" watchObservedRunningTime="2026-01-04 11:50:01.735658385 +0000 UTC m=+100.592845094" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.754564 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-kcsbk" podStartSLOduration=80.754546857 podStartE2EDuration="1m20.754546857s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.741728983 +0000 UTC m=+100.598915702" watchObservedRunningTime="2026-01-04 11:50:01.754546857 +0000 UTC 
m=+100.611733566" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.765093 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-m5fj2" podStartSLOduration=80.765073062 podStartE2EDuration="1m20.765073062s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.754945928 +0000 UTC m=+100.612132627" watchObservedRunningTime="2026-01-04 11:50:01.765073062 +0000 UTC m=+100.622259771" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.769610 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.769645 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.769654 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.769667 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.769676 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.776782 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fptjv" podStartSLOduration=80.776759756 podStartE2EDuration="1m20.776759756s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.765567885 +0000 UTC m=+100.622754604" watchObservedRunningTime="2026-01-04 11:50:01.776759756 +0000 UTC m=+100.633946465" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.795194 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=83.795175987 podStartE2EDuration="1m23.795175987s" podCreationTimestamp="2026-01-04 11:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.793604606 +0000 UTC m=+100.650791315" watchObservedRunningTime="2026-01-04 11:50:01.795175987 +0000 UTC m=+100.652362696" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.838881 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-xwctk" podStartSLOduration=80.838855096 podStartE2EDuration="1m20.838855096s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.818451104 +0000 UTC m=+100.675637833" watchObservedRunningTime="2026-01-04 11:50:01.838855096 +0000 UTC m=+100.696041805" Jan 04 11:50:01 crc kubenswrapper[4797]: 
I0104 11:50:01.848808 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.848783875 podStartE2EDuration="51.848783875s" podCreationTimestamp="2026-01-04 11:49:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.84785384 +0000 UTC m=+100.705040569" watchObservedRunningTime="2026-01-04 11:50:01.848783875 +0000 UTC m=+100.705970584" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.872347 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.872383 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.872392 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.872406 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.872415 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.974601 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.974645 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.974656 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.974671 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:01 crc kubenswrapper[4797]: I0104 11:50:01.974682 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:01Z","lastTransitionTime":"2026-01-04T11:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.077816 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.077908 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.077947 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.077982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.078044 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.180540 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.180603 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.180621 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.180652 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.180669 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.283787 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.283833 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.283844 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.283862 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.283872 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.386365 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.386408 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.386417 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.386432 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.386441 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.473668 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.473778 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:02 crc kubenswrapper[4797]: E0104 11:50:02.473897 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:02 crc kubenswrapper[4797]: E0104 11:50:02.474044 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.489166 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.489264 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.489287 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.489318 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.489341 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.592462 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.592529 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.592546 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.592568 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.592586 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.695840 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.696298 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.696491 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.696664 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.696847 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.800273 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.800321 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.800346 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.800371 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.800390 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.907021 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.907111 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.907131 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.907157 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:02 crc kubenswrapper[4797]: I0104 11:50:02.907185 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:02Z","lastTransitionTime":"2026-01-04T11:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.011183 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.011251 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.011268 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.011294 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.011313 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.114402 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.114474 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.114492 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.114522 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.114541 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.218120 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.218203 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.218226 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.218259 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.218278 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.321655 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.321726 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.321745 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.321771 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.321790 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.424622 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.424694 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.424720 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.424751 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.424773 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.474211 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.474464 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:03 crc kubenswrapper[4797]: E0104 11:50:03.474600 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:03 crc kubenswrapper[4797]: E0104 11:50:03.474750 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.528239 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.528289 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.528307 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.528330 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.528347 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.630503 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.630574 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.630594 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.630616 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.630635 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.733801 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.733858 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.733875 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.733899 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.733916 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.836967 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.837083 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.837110 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.837141 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.837162 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.939766 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.939833 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.939851 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.939876 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:03 crc kubenswrapper[4797]: I0104 11:50:03.939895 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:03Z","lastTransitionTime":"2026-01-04T11:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.042496 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.042561 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.042578 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.042608 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.042625 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.146102 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.146183 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.146223 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.146261 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.146290 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.248982 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.249111 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.249136 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.249169 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.249192 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.351648 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.351701 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.351718 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.351741 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.351758 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.454817 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.454863 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.454872 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.454888 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.454897 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.473104 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:04 crc kubenswrapper[4797]: E0104 11:50:04.473216 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.473259 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:04 crc kubenswrapper[4797]: E0104 11:50:04.473304 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.558195 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.558260 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.558270 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.558283 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.558292 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.660602 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.660647 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.660658 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.660676 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.660688 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.763859 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.763919 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.763930 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.763966 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.763980 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.867705 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.867768 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.867861 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.868199 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.868239 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.971256 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.971320 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.971339 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.971367 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:04 crc kubenswrapper[4797]: I0104 11:50:04.971384 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:04Z","lastTransitionTime":"2026-01-04T11:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.074015 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.074263 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.074331 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.074399 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.074461 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:05Z","lastTransitionTime":"2026-01-04T11:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.177849 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.177917 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.177942 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.177969 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.178054 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:05Z","lastTransitionTime":"2026-01-04T11:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.282673 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.282755 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.282781 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.282809 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.282826 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:05Z","lastTransitionTime":"2026-01-04T11:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.363477 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.363552 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.363576 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.363604 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.363625 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:05Z","lastTransitionTime":"2026-01-04T11:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.392125 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.392176 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.392193 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.392218 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.392237 4797 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:05Z","lastTransitionTime":"2026-01-04T11:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.436019 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw"] Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.436906 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.444697 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.444876 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.445935 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/acd27641-351f-4d49-a37d-c80aaacb1e96-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.446082 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acd27641-351f-4d49-a37d-c80aaacb1e96-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.446171 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.446256 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.446336 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/acd27641-351f-4d49-a37d-c80aaacb1e96-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.447233 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.451425 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.473855 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:05 crc kubenswrapper[4797]: E0104 11:50:05.474174 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.474312 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:05 crc kubenswrapper[4797]: E0104 11:50:05.474457 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547544 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/acd27641-351f-4d49-a37d-c80aaacb1e96-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547681 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/acd27641-351f-4d49-a37d-c80aaacb1e96-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547730 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acd27641-351f-4d49-a37d-c80aaacb1e96-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547771 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547808 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.547886 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" 
(UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.549184 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/acd27641-351f-4d49-a37d-c80aaacb1e96-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.549269 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/acd27641-351f-4d49-a37d-c80aaacb1e96-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.558898 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acd27641-351f-4d49-a37d-c80aaacb1e96-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.578872 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/acd27641-351f-4d49-a37d-c80aaacb1e96-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4flsw\" (UID: \"acd27641-351f-4d49-a37d-c80aaacb1e96\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:05 crc kubenswrapper[4797]: I0104 11:50:05.765363 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" Jan 04 11:50:06 crc kubenswrapper[4797]: I0104 11:50:06.118698 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" event={"ID":"acd27641-351f-4d49-a37d-c80aaacb1e96","Type":"ContainerStarted","Data":"c4ed0cd9551fb22180e2314ccfcc21ae21b96d86b5f6f59277fb7de3ffbe6dbf"} Jan 04 11:50:06 crc kubenswrapper[4797]: I0104 11:50:06.119121 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" event={"ID":"acd27641-351f-4d49-a37d-c80aaacb1e96","Type":"ContainerStarted","Data":"cf0ea1f0d6fb05a77fcedab0fcd7783c39b0d646a9d7f0d4c0872aed7e0018b1"} Jan 04 11:50:06 crc kubenswrapper[4797]: I0104 11:50:06.142149 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4flsw" podStartSLOduration=85.142120339 podStartE2EDuration="1m25.142120339s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:06.141449461 +0000 UTC m=+104.998636220" watchObservedRunningTime="2026-01-04 11:50:06.142120339 +0000 UTC m=+104.999307118" Jan 04 11:50:06 crc kubenswrapper[4797]: I0104 11:50:06.473248 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:06 crc kubenswrapper[4797]: I0104 11:50:06.473309 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:06 crc kubenswrapper[4797]: E0104 11:50:06.473422 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:06 crc kubenswrapper[4797]: E0104 11:50:06.473585 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:07 crc kubenswrapper[4797]: I0104 11:50:07.474115 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:07 crc kubenswrapper[4797]: E0104 11:50:07.474368 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:07 crc kubenswrapper[4797]: I0104 11:50:07.474441 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:07 crc kubenswrapper[4797]: E0104 11:50:07.474639 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:08 crc kubenswrapper[4797]: I0104 11:50:08.473253 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:08 crc kubenswrapper[4797]: I0104 11:50:08.473256 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:08 crc kubenswrapper[4797]: E0104 11:50:08.473449 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:08 crc kubenswrapper[4797]: E0104 11:50:08.473656 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:09 crc kubenswrapper[4797]: I0104 11:50:09.473083 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:09 crc kubenswrapper[4797]: I0104 11:50:09.473159 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:09 crc kubenswrapper[4797]: E0104 11:50:09.473262 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:09 crc kubenswrapper[4797]: E0104 11:50:09.473433 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:10 crc kubenswrapper[4797]: I0104 11:50:10.473660 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:10 crc kubenswrapper[4797]: E0104 11:50:10.473803 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:10 crc kubenswrapper[4797]: I0104 11:50:10.473873 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:10 crc kubenswrapper[4797]: E0104 11:50:10.474123 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:11 crc kubenswrapper[4797]: I0104 11:50:11.473509 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:11 crc kubenswrapper[4797]: I0104 11:50:11.473605 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:11 crc kubenswrapper[4797]: E0104 11:50:11.475098 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:11 crc kubenswrapper[4797]: E0104 11:50:11.475170 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:11 crc kubenswrapper[4797]: I0104 11:50:11.475696 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d" Jan 04 11:50:11 crc kubenswrapper[4797]: E0104 11:50:11.475857 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-thvnv_openshift-ovn-kubernetes(b765f232-404c-4b96-8190-376d4104facc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" Jan 04 11:50:12 crc kubenswrapper[4797]: I0104 11:50:12.473551 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:12 crc kubenswrapper[4797]: I0104 11:50:12.473591 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:12 crc kubenswrapper[4797]: E0104 11:50:12.473686 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:12 crc kubenswrapper[4797]: E0104 11:50:12.473836 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:13 crc kubenswrapper[4797]: I0104 11:50:13.473978 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:13 crc kubenswrapper[4797]: I0104 11:50:13.474063 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:13 crc kubenswrapper[4797]: E0104 11:50:13.474689 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:13 crc kubenswrapper[4797]: E0104 11:50:13.474514 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:14 crc kubenswrapper[4797]: I0104 11:50:14.473507 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:14 crc kubenswrapper[4797]: I0104 11:50:14.473546 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:14 crc kubenswrapper[4797]: E0104 11:50:14.473742 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:14 crc kubenswrapper[4797]: E0104 11:50:14.473868 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.157133 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/1.log" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.158330 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/0.log" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.158398 4797 generic.go:334] "Generic (PLEG): container finished" podID="91fac858-36ec-4a4b-ba0d-014f6b96b421" containerID="65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f" exitCode=1 Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.158453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerDied","Data":"65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f"} Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.158526 4797 scope.go:117] "RemoveContainer" containerID="f7372b1dfefe41cac1d9a1f2e26a798c60ec2b1ab56599fd26cb5bd065873a6b" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.159140 4797 scope.go:117] "RemoveContainer" containerID="65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f" Jan 04 11:50:15 crc kubenswrapper[4797]: E0104 11:50:15.159421 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-xwctk_openshift-multus(91fac858-36ec-4a4b-ba0d-014f6b96b421)\"" pod="openshift-multus/multus-xwctk" podUID="91fac858-36ec-4a4b-ba0d-014f6b96b421" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.473940 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:15 crc kubenswrapper[4797]: E0104 11:50:15.474129 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:15 crc kubenswrapper[4797]: I0104 11:50:15.474531 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:15 crc kubenswrapper[4797]: E0104 11:50:15.474785 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:16 crc kubenswrapper[4797]: I0104 11:50:16.164872 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/1.log" Jan 04 11:50:16 crc kubenswrapper[4797]: I0104 11:50:16.473284 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:16 crc kubenswrapper[4797]: I0104 11:50:16.473347 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:16 crc kubenswrapper[4797]: E0104 11:50:16.473455 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:16 crc kubenswrapper[4797]: E0104 11:50:16.473602 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:17 crc kubenswrapper[4797]: I0104 11:50:17.475324 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:17 crc kubenswrapper[4797]: I0104 11:50:17.475327 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:17 crc kubenswrapper[4797]: E0104 11:50:17.475538 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:17 crc kubenswrapper[4797]: E0104 11:50:17.475618 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:18 crc kubenswrapper[4797]: I0104 11:50:18.473715 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:18 crc kubenswrapper[4797]: E0104 11:50:18.473879 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:18 crc kubenswrapper[4797]: I0104 11:50:18.473715 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:18 crc kubenswrapper[4797]: E0104 11:50:18.474090 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:19 crc kubenswrapper[4797]: I0104 11:50:19.473759 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:19 crc kubenswrapper[4797]: E0104 11:50:19.473957 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:19 crc kubenswrapper[4797]: I0104 11:50:19.474049 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:19 crc kubenswrapper[4797]: E0104 11:50:19.474261 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:20 crc kubenswrapper[4797]: I0104 11:50:20.474025 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:20 crc kubenswrapper[4797]: I0104 11:50:20.474089 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:20 crc kubenswrapper[4797]: E0104 11:50:20.474252 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:20 crc kubenswrapper[4797]: E0104 11:50:20.474403 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:21 crc kubenswrapper[4797]: E0104 11:50:21.450847 4797 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 04 11:50:21 crc kubenswrapper[4797]: I0104 11:50:21.473160 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:21 crc kubenswrapper[4797]: E0104 11:50:21.475153 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:21 crc kubenswrapper[4797]: I0104 11:50:21.475199 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:21 crc kubenswrapper[4797]: E0104 11:50:21.475847 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:21 crc kubenswrapper[4797]: E0104 11:50:21.572652 4797 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 11:50:22 crc kubenswrapper[4797]: I0104 11:50:22.473784 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:22 crc kubenswrapper[4797]: E0104 11:50:22.474072 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:22 crc kubenswrapper[4797]: I0104 11:50:22.473809 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:22 crc kubenswrapper[4797]: E0104 11:50:22.474718 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:22 crc kubenswrapper[4797]: I0104 11:50:22.475302 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d" Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.206065 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/3.log" Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.208652 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerStarted","Data":"524c7a1ca24d99795031a9d26001fe5ca859ae7d5a8f3afc75b12818544455b9"} Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.209385 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.250775 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podStartSLOduration=102.250752023 podStartE2EDuration="1m42.250752023s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:23.248314719 +0000 UTC m=+122.105501468" watchObservedRunningTime="2026-01-04 11:50:23.250752023 +0000 UTC m=+122.107938772" Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.316632 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-v8lzg"] Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.316753 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:23 crc kubenswrapper[4797]: E0104 11:50:23.316845 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:23 crc kubenswrapper[4797]: I0104 11:50:23.473889 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:23 crc kubenswrapper[4797]: E0104 11:50:23.474210 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:24 crc kubenswrapper[4797]: I0104 11:50:24.473825 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:24 crc kubenswrapper[4797]: I0104 11:50:24.473949 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:24 crc kubenswrapper[4797]: E0104 11:50:24.474433 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:24 crc kubenswrapper[4797]: E0104 11:50:24.474699 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:25 crc kubenswrapper[4797]: I0104 11:50:25.473119 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:25 crc kubenswrapper[4797]: E0104 11:50:25.473298 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:25 crc kubenswrapper[4797]: I0104 11:50:25.473587 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:25 crc kubenswrapper[4797]: E0104 11:50:25.473696 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:26 crc kubenswrapper[4797]: I0104 11:50:26.473122 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:26 crc kubenswrapper[4797]: I0104 11:50:26.473122 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:26 crc kubenswrapper[4797]: E0104 11:50:26.473343 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:26 crc kubenswrapper[4797]: E0104 11:50:26.473416 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:26 crc kubenswrapper[4797]: E0104 11:50:26.574603 4797 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 11:50:27 crc kubenswrapper[4797]: I0104 11:50:27.474198 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:27 crc kubenswrapper[4797]: I0104 11:50:27.474332 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:27 crc kubenswrapper[4797]: E0104 11:50:27.474438 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:27 crc kubenswrapper[4797]: E0104 11:50:27.474848 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:27 crc kubenswrapper[4797]: I0104 11:50:27.475552 4797 scope.go:117] "RemoveContainer" containerID="65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f" Jan 04 11:50:28 crc kubenswrapper[4797]: I0104 11:50:28.230034 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/1.log" Jan 04 11:50:28 crc kubenswrapper[4797]: I0104 11:50:28.230105 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerStarted","Data":"06d0b4ccf7ed92165350b78728e150e285b69b3ba50371a453f160262ef42e1c"} Jan 04 11:50:28 crc kubenswrapper[4797]: I0104 11:50:28.473397 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:28 crc kubenswrapper[4797]: I0104 11:50:28.473481 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:28 crc kubenswrapper[4797]: E0104 11:50:28.473934 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:28 crc kubenswrapper[4797]: E0104 11:50:28.474129 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:29 crc kubenswrapper[4797]: I0104 11:50:29.473609 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:29 crc kubenswrapper[4797]: I0104 11:50:29.473737 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:29 crc kubenswrapper[4797]: E0104 11:50:29.473849 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:29 crc kubenswrapper[4797]: E0104 11:50:29.473938 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:30 crc kubenswrapper[4797]: I0104 11:50:30.473766 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:30 crc kubenswrapper[4797]: I0104 11:50:30.473831 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:30 crc kubenswrapper[4797]: E0104 11:50:30.473965 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:30 crc kubenswrapper[4797]: E0104 11:50:30.474165 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:31 crc kubenswrapper[4797]: I0104 11:50:31.473609 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:31 crc kubenswrapper[4797]: I0104 11:50:31.473642 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:31 crc kubenswrapper[4797]: E0104 11:50:31.476349 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:31 crc kubenswrapper[4797]: E0104 11:50:31.476475 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-v8lzg" podUID="4c264f05-2fcc-422e-a717-d766b27bfd5b" Jan 04 11:50:32 crc kubenswrapper[4797]: I0104 11:50:32.473526 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:32 crc kubenswrapper[4797]: I0104 11:50:32.473526 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:32 crc kubenswrapper[4797]: I0104 11:50:32.476406 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 04 11:50:32 crc kubenswrapper[4797]: I0104 11:50:32.476460 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.474099 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.474167 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.476928 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.477386 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.477868 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 04 11:50:33 crc kubenswrapper[4797]: I0104 11:50:33.477581 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.019673 4797 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.052595 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.053303 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.053890 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.054022 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.054378 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.054391 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.059850 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.060412 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.062747 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.062783 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.063047 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.063075 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.063795 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.065438 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.065929 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kz2nv"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.066131 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.066731 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.070094 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.071162 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.072535 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mhvp5"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.073343 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074404 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074533 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074593 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074673 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074673 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074731 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074750 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074796 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.074907 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.075251 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9kvs8"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.076557 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.077521 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.077687 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.077692 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.078087 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.078177 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.078178 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.078669 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.080749 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.081148 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.081506 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.081698 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.081723 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082154 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082172 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082257 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082327 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082514 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082615 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082730 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.082860 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.083070 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.083133 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.083073 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.083441 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.088912 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.105210 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pj8fz"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.106692 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mff6w"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.120633 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.121575 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.122216 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.123566 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-vg9x9"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.123932 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124196 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124335 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124349 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124479 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124613 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124703 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vg9x9" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124742 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.124869 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.125066 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.125190 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.125515 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.125721 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.126027 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.126863 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4dcv7"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.127284 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.130442 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.130647 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.130849 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.130450 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131080 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131090 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131146 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131146 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131423 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131469 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131541 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131612 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131613 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131637 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.131614 4797 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.133515 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.134046 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.135347 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gzw7t"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.136153 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137028 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-machine-approver-tls\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137099 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66a0e14f-90de-4516-9642-14c4e6bbf9a1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137134 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a0e14f-90de-4516-9642-14c4e6bbf9a1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137169 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137198 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-dir\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137234 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk27w\" (UniqueName: \"kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137267 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpt6h\" (UniqueName: \"kubernetes.io/projected/230f3952-4d78-4975-9304-0ae7ee0ec87e-kube-api-access-lpt6h\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137296 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-config\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137327 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137362 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-encryption-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137396 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrmps\" (UniqueName: \"kubernetes.io/projected/c62ca291-d5b1-4491-94d5-a6018ead98bc-kube-api-access-lrmps\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137427 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137457 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137487 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc 
kubenswrapper[4797]: I0104 11:50:36.137522 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137554 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzvf9\" (UniqueName: \"kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137586 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137617 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137653 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137686 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4858z\" (UniqueName: \"kubernetes.io/projected/d7c0be23-efaf-490e-a685-d3cf605dd7ac-kube-api-access-4858z\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137716 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/48f0c0da-d926-4b80-bacf-6dcfd2298456-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137749 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc 
kubenswrapper[4797]: I0104 11:50:36.137781 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137811 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-serving-cert\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137916 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-policies\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137948 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9g9t\" (UniqueName: \"kubernetes.io/projected/c540aa4c-7310-40c0-b929-11b5d21e59fb-kube-api-access-n9g9t\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.137964 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f0c0da-d926-4b80-bacf-6dcfd2298456-serving-cert\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138172 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138195 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-encryption-config\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138219 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-client\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138240 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-serving-cert\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138254 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7c0be23-efaf-490e-a685-d3cf605dd7ac-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138269 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-config\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138283 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138320 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138367 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62ca291-d5b1-4491-94d5-a6018ead98bc-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138386 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc78q\" (UniqueName: \"kubernetes.io/projected/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-kube-api-access-dc78q\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138442 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-auth-proxy-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138461 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spq7s\" (UniqueName: \"kubernetes.io/projected/48f0c0da-d926-4b80-bacf-6dcfd2298456-kube-api-access-spq7s\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138477 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjwf2\" (UniqueName: \"kubernetes.io/projected/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-kube-api-access-zjwf2\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138522 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138542 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138556 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138575 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4rqb\" (UniqueName: \"kubernetes.io/projected/bac1cac5-6f43-495f-9a7b-b5a1e13d5898-kube-api-access-j4rqb\") pod \"downloads-7954f5f757-vg9x9\" (UID: \"bac1cac5-6f43-495f-9a7b-b5a1e13d5898\") " pod="openshift-console/downloads-7954f5f757-vg9x9" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138593 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zwxd\" (UniqueName: \"kubernetes.io/projected/7db1463e-4658-4160-bdf2-76b60a23c85e-kube-api-access-4zwxd\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138616 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138634 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138673 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7db1463e-4658-4160-bdf2-76b60a23c85e-serving-cert\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138690 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit-dir\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138704 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138722 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-service-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138737 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzvmp\" (UniqueName: \"kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138772 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138790 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-images\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138816 4797 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138840 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138862 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138890 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138922 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138945 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tw8f\" (UniqueName: \"kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.138969 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-image-import-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139047 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq549\" (UniqueName: \"kubernetes.io/projected/107f145c-ad17-499f-ae0b-55c34e0d04e6-kube-api-access-tq549\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139077 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-client\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139098 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/230f3952-4d78-4975-9304-0ae7ee0ec87e-serving-cert\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139129 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgps2\" (UniqueName: \"kubernetes.io/projected/66a0e14f-90de-4516-9642-14c4e6bbf9a1-kube-api-access-vgps2\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139155 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-config\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139185 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139225 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-trusted-ca\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139243 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c62ca291-d5b1-4491-94d5-a6018ead98bc-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139295 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139339 4797 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-serving-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139364 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139381 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139402 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-node-pullsecrets\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139422 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139462 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139481 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.139495 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.142920 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: 
I0104 11:50:36.143836 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.144404 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.144514 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.144767 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.144869 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.145085 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.145192 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.145373 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.147188 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.147686 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.148206 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.148656 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.148930 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.149097 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.149237 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.149514 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.149852 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.149887 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.150304 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.150690 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.152760 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.154023 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.165445 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.167569 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.175573 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.187774 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188146 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188175 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188340 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188537 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188590 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188881 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.188957 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189186 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189210 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189340 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189403 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189478 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189609 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189689 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189740 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189755 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189794 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189909 4797 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.189973 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.190011 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.191329 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.191377 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.191654 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.193781 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.194182 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.194440 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.194708 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195124 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195136 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zdnhw"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195447 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195661 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195695 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195668 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195766 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195886 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.195970 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.196599 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.201587 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.202496 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.203292 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.203860 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.204370 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205031 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205256 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205485 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205507 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205709 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205882 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.205904 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.206725 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mhvp5"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.206751 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.207936 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.207951 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.208163 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.208401 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.210862 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.210913 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l8qm2"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.236508 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kz2nv"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.236552 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.236772 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.237276 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.237521 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.238106 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.239042 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.241060 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjwf2\" (UniqueName: \"kubernetes.io/projected/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-kube-api-access-zjwf2\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.241100 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246674 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246732 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246762 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246789 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4rqb\" (UniqueName: \"kubernetes.io/projected/bac1cac5-6f43-495f-9a7b-b5a1e13d5898-kube-api-access-j4rqb\") pod \"downloads-7954f5f757-vg9x9\" (UID: \"bac1cac5-6f43-495f-9a7b-b5a1e13d5898\") " pod="openshift-console/downloads-7954f5f757-vg9x9" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246813 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zwxd\" (UniqueName: \"kubernetes.io/projected/7db1463e-4658-4160-bdf2-76b60a23c85e-kube-api-access-4zwxd\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246833 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: 
\"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246855 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246874 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7db1463e-4658-4160-bdf2-76b60a23c85e-serving-cert\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246894 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit-dir\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246913 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-service-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246930 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzvmp\" (UniqueName: \"kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246955 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.246974 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247005 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-images\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247024 4797 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247040 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247062 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247082 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247101 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tw8f\" (UniqueName: \"kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.247120 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-image-import-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250009 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250175 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-service-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250361 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit-dir\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250408 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq549\" (UniqueName: \"kubernetes.io/projected/107f145c-ad17-499f-ae0b-55c34e0d04e6-kube-api-access-tq549\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250460 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-client\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250478 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/230f3952-4d78-4975-9304-0ae7ee0ec87e-serving-cert\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250513 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250520 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgps2\" (UniqueName: \"kubernetes.io/projected/66a0e14f-90de-4516-9642-14c4e6bbf9a1-kube-api-access-vgps2\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250556 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-config\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250577 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250600 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-trusted-ca\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc 
kubenswrapper[4797]: I0104 11:50:36.250623 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c62ca291-d5b1-4491-94d5-a6018ead98bc-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250642 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250661 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-serving-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250676 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250691 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-node-pullsecrets\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250707 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250729 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250747 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250763 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250767 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-image-import-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250779 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250815 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk27w\" (UniqueName: \"kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250834 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-machine-approver-tls\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250851 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66a0e14f-90de-4516-9642-14c4e6bbf9a1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250868 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a0e14f-90de-4516-9642-14c4e6bbf9a1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250885 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250902 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-dir\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: 
\"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250917 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-encryption-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250934 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpt6h\" (UniqueName: \"kubernetes.io/projected/230f3952-4d78-4975-9304-0ae7ee0ec87e-kube-api-access-lpt6h\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250950 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-config\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.250968 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251000 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251030 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrmps\" (UniqueName: \"kubernetes.io/projected/c62ca291-d5b1-4491-94d5-a6018ead98bc-kube-api-access-lrmps\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251048 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251067 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251086 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" 
(UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251103 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzvf9\" (UniqueName: \"kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251120 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251136 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251153 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251173 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/48f0c0da-d926-4b80-bacf-6dcfd2298456-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251192 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4858z\" (UniqueName: \"kubernetes.io/projected/d7c0be23-efaf-490e-a685-d3cf605dd7ac-kube-api-access-4858z\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251210 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251226 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251243 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9g9t\" (UniqueName: \"kubernetes.io/projected/c540aa4c-7310-40c0-b929-11b5d21e59fb-kube-api-access-n9g9t\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251258 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-serving-cert\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251284 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-policies\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251298 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-encryption-config\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251313 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f0c0da-d926-4b80-bacf-6dcfd2298456-serving-cert\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251331 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251375 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-client\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251380 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251390 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-serving-cert\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251437 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62ca291-d5b1-4491-94d5-a6018ead98bc-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251463 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7c0be23-efaf-490e-a685-d3cf605dd7ac-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251487 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-config\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251509 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251533 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251576 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-serving-ca\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251600 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc78q\" (UniqueName: \"kubernetes.io/projected/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-kube-api-access-dc78q\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251652 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-auth-proxy-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: 
\"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251679 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spq7s\" (UniqueName: \"kubernetes.io/projected/48f0c0da-d926-4b80-bacf-6dcfd2298456-kube-api-access-spq7s\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.252093 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-config\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.254457 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.254703 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/107f145c-ad17-499f-ae0b-55c34e0d04e6-node-pullsecrets\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.255137 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.255178 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7db1463e-4658-4160-bdf2-76b60a23c85e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.255179 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-trusted-ca\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.255675 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.255838 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c62ca291-d5b1-4491-94d5-a6018ead98bc-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.256259 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7db1463e-4658-4160-bdf2-76b60a23c85e-serving-cert\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.256285 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-images\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257151 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257208 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257240 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257532 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-serving-cert\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257733 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a0e14f-90de-4516-9642-14c4e6bbf9a1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.257735 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: 
I0104 11:50:36.258092 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.258187 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.258815 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.259174 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.259530 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-config\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260346 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/48f0c0da-d926-4b80-bacf-6dcfd2298456-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260455 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/230f3952-4d78-4975-9304-0ae7ee0ec87e-serving-cert\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260421 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260379 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260693 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260851 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-etcd-client\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.260894 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/230f3952-4d78-4975-9304-0ae7ee0ec87e-config\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261069 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261179 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66a0e14f-90de-4516-9642-14c4e6bbf9a1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261360 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261842 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261916 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.261972 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-dir\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.262573 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.262636 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.262797 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-encryption-config\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263131 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263292 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263299 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263316 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-x2zb8"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263643 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.263645 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert\") pod \"console-f9d7485db-wftnf\" (UID: 
\"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.251081 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.264139 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-auth-proxy-config\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.264306 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/107f145c-ad17-499f-ae0b-55c34e0d04e6-audit\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.264366 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7c0be23-efaf-490e-a685-d3cf605dd7ac-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.264646 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.264892 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c540aa4c-7310-40c0-b929-11b5d21e59fb-audit-policies\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.265605 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-encryption-config\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.265682 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.266264 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/107f145c-ad17-499f-ae0b-55c34e0d04e6-serving-cert\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.267304 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48f0c0da-d926-4b80-bacf-6dcfd2298456-serving-cert\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.267399 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c540aa4c-7310-40c0-b929-11b5d21e59fb-etcd-client\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.268743 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9lgj6"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.268868 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.270017 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.270256 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mff6w"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.270369 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vg9x9"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.270144 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9lgj6" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.271046 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.271765 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-machine-approver-tls\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.272280 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.273416 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gzw7t"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.274350 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.274492 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62ca291-d5b1-4491-94d5-a6018ead98bc-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.274752 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pj8fz"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.275742 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.277461 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.277587 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.278970 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.280255 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4dcv7"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.281588 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.283777 4797 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.284056 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.287099 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.287127 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.288687 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.290092 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.291419 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.292855 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.294107 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.295372 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87bml"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.295410 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.297462 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9kvs8"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.297566 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.298006 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-25vnt"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.298555 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.299313 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.300599 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.302140 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.303382 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.304733 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.305981 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l8qm2"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.307308 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87bml"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.308764 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9lgj6"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.310155 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.311435 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-25vnt"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.312900 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"] Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.315936 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.335496 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.355745 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.376323 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.395819 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.416180 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.436323 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.455833 4797 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.475794 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.495645 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.536618 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.556700 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.575799 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.596305 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.616559 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.636308 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.657058 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.675803 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.697087 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.717478 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.736218 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.756973 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.776631 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.796444 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.816259 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.837491 4797 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.856689 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.875942 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.897303 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.916578 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.936561 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.956087 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.977081 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 04 11:50:36 crc kubenswrapper[4797]: I0104 11:50:36.997275 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.016060 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.036127 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.055236 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.076046 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.096972 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.116545 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.136470 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.157215 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.177814 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.195676 4797 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.214677 4797 request.go:700] Waited for 1.01107928s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-metrics-certs-default&limit=500&resourceVersion=0 Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.216270 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.235533 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.255968 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.275661 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.296221 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.316029 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.337224 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.356324 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.376237 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.401852 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.415414 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.436583 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.456867 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.477016 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.498942 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.516200 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.535980 4797 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.556849 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.576705 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.596275 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.617851 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.636276 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.656715 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.677064 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.696701 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.716059 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.736262 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.756498 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.778608 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.796658 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.835558 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.837672 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.876309 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjwf2\" (UniqueName: \"kubernetes.io/projected/d04a9a55-4a32-4b67-8f29-c817d4ec71b9-kube-api-access-zjwf2\") pod \"machine-approver-56656f9798-gcbc4\" (UID: \"d04a9a55-4a32-4b67-8f29-c817d4ec71b9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.899407 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tw8f\" (UniqueName: 
\"kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f\") pod \"controller-manager-879f6c89f-wt6hc\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.916878 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgps2\" (UniqueName: \"kubernetes.io/projected/66a0e14f-90de-4516-9642-14c4e6bbf9a1-kube-api-access-vgps2\") pod \"openshift-controller-manager-operator-756b6f6bc6-726np\" (UID: \"66a0e14f-90de-4516-9642-14c4e6bbf9a1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.932591 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.942439 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4rqb\" (UniqueName: \"kubernetes.io/projected/bac1cac5-6f43-495f-9a7b-b5a1e13d5898-kube-api-access-j4rqb\") pod \"downloads-7954f5f757-vg9x9\" (UID: \"bac1cac5-6f43-495f-9a7b-b5a1e13d5898\") " pod="openshift-console/downloads-7954f5f757-vg9x9" Jan 04 11:50:37 crc kubenswrapper[4797]: W0104 11:50:37.953498 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd04a9a55_4a32_4b67_8f29_c817d4ec71b9.slice/crio-84ebf01e13d226b1f1c75157c6ceacd42e2893f10b52089d27abdcb78fe11417 WatchSource:0}: Error finding container 84ebf01e13d226b1f1c75157c6ceacd42e2893f10b52089d27abdcb78fe11417: Status 404 returned error can't find the container with id 84ebf01e13d226b1f1c75157c6ceacd42e2893f10b52089d27abdcb78fe11417 Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.973959 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq549\" (UniqueName: \"kubernetes.io/projected/107f145c-ad17-499f-ae0b-55c34e0d04e6-kube-api-access-tq549\") pod \"apiserver-76f77b778f-9kvs8\" (UID: \"107f145c-ad17-499f-ae0b-55c34e0d04e6\") " pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.976108 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zwxd\" (UniqueName: \"kubernetes.io/projected/7db1463e-4658-4160-bdf2-76b60a23c85e-kube-api-access-4zwxd\") pod \"authentication-operator-69f744f599-kz2nv\" (UID: \"7db1463e-4658-4160-bdf2-76b60a23c85e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.986771 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.991463 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spq7s\" (UniqueName: \"kubernetes.io/projected/48f0c0da-d926-4b80-bacf-6dcfd2298456-kube-api-access-spq7s\") pod \"openshift-config-operator-7777fb866f-vvjgp\" (UID: \"48f0c0da-d926-4b80-bacf-6dcfd2298456\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" Jan 04 11:50:37 crc kubenswrapper[4797]: I0104 11:50:37.997263 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.010383 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzvmp\" (UniqueName: \"kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp\") pod \"oauth-openshift-558db77b4-pj8fz\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.032567 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk27w\" (UniqueName: \"kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w\") pod \"route-controller-manager-6576b87f9c-6vf5r\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.049940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzvf9\" (UniqueName: \"kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9\") pod \"console-f9d7485db-wftnf\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074379 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074752 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074802 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074829 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sbn9\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074854 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074889 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074912 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.074961 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.076054 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.576036908 +0000 UTC m=+137.433223617 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.095522 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpt6h\" (UniqueName: \"kubernetes.io/projected/230f3952-4d78-4975-9304-0ae7ee0ec87e-kube-api-access-lpt6h\") pod \"console-operator-58897d9998-mff6w\" (UID: \"230f3952-4d78-4975-9304-0ae7ee0ec87e\") " pod="openshift-console-operator/console-operator-58897d9998-mff6w"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.104695 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.112098 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.112827 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4858z\" (UniqueName: \"kubernetes.io/projected/d7c0be23-efaf-490e-a685-d3cf605dd7ac-kube-api-access-4858z\") pod \"cluster-samples-operator-665b6dd947-z6g55\" (UID: \"d7c0be23-efaf-490e-a685-d3cf605dd7ac\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.120719 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.131159 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.134646 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrmps\" (UniqueName: \"kubernetes.io/projected/c62ca291-d5b1-4491-94d5-a6018ead98bc-kube-api-access-lrmps\") pod \"openshift-apiserver-operator-796bbdcf4f-tlvtx\" (UID: \"c62ca291-d5b1-4491-94d5-a6018ead98bc\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.136241 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mff6w"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.144105 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.149868 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vg9x9"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.153032 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc78q\" (UniqueName: \"kubernetes.io/projected/0d05b9b3-d6d6-4fcc-9291-1ffac489c644-kube-api-access-dc78q\") pod \"machine-api-operator-5694c8668f-mhvp5\" (UID: \"0d05b9b3-d6d6-4fcc-9291-1ffac489c644\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.156767 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.172491 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9g9t\" (UniqueName: \"kubernetes.io/projected/c540aa4c-7310-40c0-b929-11b5d21e59fb-kube-api-access-n9g9t\") pod \"apiserver-7bbb656c7d-fr4c5\" (UID: \"c540aa4c-7310-40c0-b929-11b5d21e59fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.175906 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.177305 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.177454 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.677434151 +0000 UTC m=+137.534620870 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.177484 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-cabundle\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.177507 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf9d4794-e0c1-4200-912a-57e7c34d7250-metrics-tls\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.177528 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.177544 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac79c571-f4ba-4eee-850e-7c3ca6465535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178278 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-profile-collector-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178313 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-stats-auth\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178334 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30b52ed3-65f0-4673-84ae-14b5f07bed22-cert\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178364 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfzqp\" (UniqueName: \"kubernetes.io/projected/30b52ed3-65f0-4673-84ae-14b5f07bed22-kube-api-access-zfzqp\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178386 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-config\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178408 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-service-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178451 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-config-volume\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178471 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e7ecb074-1cfe-433b-a183-12a3ddd85144-tmpfs\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178492 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvftr\" (UniqueName: \"kubernetes.io/projected/204ab34c-de60-427e-be71-d44d8461b8b6-kube-api-access-zvftr\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178515 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-csi-data-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178570 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178594 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178611 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-client\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178628 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ecce101-e9fe-4612-8306-94165bb43460-service-ca-bundle\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.178827 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-srv-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.178948 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.678933471 +0000 UTC m=+137.536120180 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179005 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8frgw\" (UniqueName: \"kubernetes.io/projected/e7ecb074-1cfe-433b-a183-12a3ddd85144-kube-api-access-8frgw\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179026 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cad2fe8-0191-4762-a99c-5206030e7866-serving-cert\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179043 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179077 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c33ab196-8968-49ef-bc23-4b81e9f18d7a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179093 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnqlm\" (UniqueName: \"kubernetes.io/projected/b596a0e4-953f-462f-a68d-58cdfdb6294b-kube-api-access-cnqlm\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179119 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179145 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx8wp\" (UniqueName: \"kubernetes.io/projected/d7f55ff9-695a-468d-8a5a-727c083ec754-kube-api-access-kx8wp\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179181 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0101a1a3-cd94-49dc-8a5b-9876927cdfab-proxy-tls\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179205 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-plugins-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179224 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179439 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rww4\" (UniqueName: \"kubernetes.io/projected/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-kube-api-access-4rww4\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179457 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cad2fe8-0191-4762-a99c-5206030e7866-config\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179490 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkc66\" (UniqueName: \"kubernetes.io/projected/73da5c10-f8a5-4d86-a730-d876d3629e14-kube-api-access-nkc66\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179594 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c33ab196-8968-49ef-bc23-4b81e9f18d7a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179634 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179652 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf9d4794-e0c1-4200-912a-57e7c34d7250-trusted-ca\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179748 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179775 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179823 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/92990496-8dbe-46ae-8ed1-e9820e8d8c83-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179840 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ht4f\" (UniqueName: \"kubernetes.io/projected/2cad2fe8-0191-4762-a99c-5206030e7866-kube-api-access-2ht4f\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179879 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179914 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj7n5\" (UniqueName: \"kubernetes.io/projected/0101a1a3-cd94-49dc-8a5b-9876927cdfab-kube-api-access-lj7n5\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179935 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-default-certificate\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179951 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92990496-8dbe-46ae-8ed1-e9820e8d8c83-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.179972 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66lv9\" (UniqueName: \"kubernetes.io/projected/57196a6c-a8d4-4361-b282-3178b05ba6f4-kube-api-access-66lv9\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180004 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-node-bootstrap-token\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180028 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180072 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-key\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180122 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-config\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180379 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lmn6\" (UniqueName: \"kubernetes.io/projected/ac79c571-f4ba-4eee-850e-7c3ca6465535-kube-api-access-2lmn6\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180438 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-metrics-tls\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180523 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.180529 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-mountpoint-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181159 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac79c571-f4ba-4eee-850e-7c3ca6465535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181200 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b596a0e4-953f-462f-a68d-58cdfdb6294b-metrics-tls\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181216 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nklsc\" (UniqueName: \"kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181246 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181269 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sbn9\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181341 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181403 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181423 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec4dbb28-5584-44b3-9b23-6e9f811f546d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181461 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33ab196-8968-49ef-bc23-4b81e9f18d7a-config\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181492 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.181509 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-serving-cert\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182256 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182313 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdjhw\" (UniqueName: \"kubernetes.io/projected/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-kube-api-access-zdjhw\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182380 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182398 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-socket-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182418 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182483 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42fsr\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-kube-api-access-42fsr\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182558 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92m8h\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-kube-api-access-92m8h\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182576 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-metrics-certs\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182611 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-certs\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182639 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-images\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182655 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48lr4\" (UniqueName: \"kubernetes.io/projected/6ed91194-b05f-42a5-a8cd-6a94299b2b01-kube-api-access-48lr4\") pod \"migrator-59844c95c7-66qs6\" (UID: \"6ed91194-b05f-42a5-a8cd-6a94299b2b01\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182695 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0101a1a3-cd94-49dc-8a5b-9876927cdfab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182711 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182737 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182775 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182795 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mh49\" (UniqueName: \"kubernetes.io/projected/ead72a63-32ff-4a6c-b371-6c03adc8015b-kube-api-access-7mh49\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182810 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-srv-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182858 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89w2c\" (UniqueName: \"kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.182875 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btxrb\" (UniqueName: \"kubernetes.io/projected/ec4dbb28-5584-44b3-9b23-6e9f811f546d-kube-api-access-btxrb\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184424 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184671 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/204ab34c-de60-427e-be71-d44d8461b8b6-proxy-tls\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184690 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-auth-proxy-config\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184729 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4bfg\" (UniqueName: \"kubernetes.io/projected/346a0026-3d33-4d35-9a7d-a622071ce541-kube-api-access-t4bfg\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184772 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzzm4\" (UniqueName: \"kubernetes.io/projected/2ecce101-e9fe-4612-8306-94165bb43460-kube-api-access-wzzm4\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184829 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92990496-8dbe-46ae-8ed1-e9820e8d8c83-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184852 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-registration-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184866 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-webhook-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184908 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gwng\" (UniqueName: \"kubernetes.io/projected/929fa979-e34b-4512-baff-7d6a4ab601ce-kube-api-access-6gwng\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.184923 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/346a0026-3d33-4d35-9a7d-a622071ce541-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.185268 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmgs9\" (UniqueName: \"kubernetes.io/projected/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-kube-api-access-dmgs9\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.185894 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.186184 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.187363 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.195941 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.218444 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.230342 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.234400 4797 request.go:700] Waited for 1.963597647s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.236050 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.244515 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.256059 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.284362 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286668 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286783 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e7ecb074-1cfe-433b-a183-12a3ddd85144-tmpfs\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286813 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvftr\" (UniqueName: \"kubernetes.io/projected/204ab34c-de60-427e-be71-d44d8461b8b6-kube-api-access-zvftr\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"
Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.286836 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.786815624 +0000 UTC m=+137.644002333 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286873 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-config-volume\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286906 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-csi-data-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286923 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286947 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.286967 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-client\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287036 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ecce101-e9fe-4612-8306-94165bb43460-service-ca-bundle\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287053 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-srv-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287071 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8frgw\" (UniqueName: \"kubernetes.io/projected/e7ecb074-1cfe-433b-a183-12a3ddd85144-kube-api-access-8frgw\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287088 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cad2fe8-0191-4762-a99c-5206030e7866-serving-cert\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287109 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287126 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c33ab196-8968-49ef-bc23-4b81e9f18d7a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287148 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnqlm\" (UniqueName: \"kubernetes.io/projected/b596a0e4-953f-462f-a68d-58cdfdb6294b-kube-api-access-cnqlm\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287166 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287181 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx8wp\" (UniqueName: \"kubernetes.io/projected/d7f55ff9-695a-468d-8a5a-727c083ec754-kube-api-access-kx8wp\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287203 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0101a1a3-cd94-49dc-8a5b-9876927cdfab-proxy-tls\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287219 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-plugins-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287243 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rww4\" (UniqueName: \"kubernetes.io/projected/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-kube-api-access-4rww4\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287257 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cad2fe8-0191-4762-a99c-5206030e7866-config\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287272 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkc66\" (UniqueName: \"kubernetes.io/projected/73da5c10-f8a5-4d86-a730-d876d3629e14-kube-api-access-nkc66\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287343 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e7ecb074-1cfe-433b-a183-12a3ddd85144-tmpfs\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287347 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c33ab196-8968-49ef-bc23-4b81e9f18d7a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287398 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287429 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf9d4794-e0c1-4200-912a-57e7c34d7250-trusted-ca\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287451 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287468 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287485 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/92990496-8dbe-46ae-8ed1-e9820e8d8c83-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287503 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ht4f\" (UniqueName: \"kubernetes.io/projected/2cad2fe8-0191-4762-a99c-5206030e7866-kube-api-access-2ht4f\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287519 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287548 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66lv9\" (UniqueName: \"kubernetes.io/projected/57196a6c-a8d4-4361-b282-3178b05ba6f4-kube-api-access-66lv9\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287565 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-node-bootstrap-token\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287581 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj7n5\" (UniqueName: \"kubernetes.io/projected/0101a1a3-cd94-49dc-8a5b-9876927cdfab-kube-api-access-lj7n5\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287597 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-default-certificate\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287612 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName:
\"kubernetes.io/configmap/92990496-8dbe-46ae-8ed1-e9820e8d8c83-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287635 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-key\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287652 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-config\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287670 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lmn6\" (UniqueName: \"kubernetes.io/projected/ac79c571-f4ba-4eee-850e-7c3ca6465535-kube-api-access-2lmn6\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287688 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-metrics-tls\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287707 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-mountpoint-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287723 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac79c571-f4ba-4eee-850e-7c3ca6465535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287749 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b596a0e4-953f-462f-a68d-58cdfdb6294b-metrics-tls\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287765 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nklsc\" (UniqueName: \"kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287787 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287802 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33ab196-8968-49ef-bc23-4b81e9f18d7a-config\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287818 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec4dbb28-5584-44b3-9b23-6e9f811f546d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287837 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-serving-cert\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287853 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287870 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdjhw\" (UniqueName: \"kubernetes.io/projected/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-kube-api-access-zdjhw\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287889 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-socket-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287904 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287927 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-42fsr\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-kube-api-access-42fsr\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287945 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92m8h\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-kube-api-access-92m8h\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287962 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-metrics-certs\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.287978 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-certs\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288011 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-images\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288027 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48lr4\" (UniqueName: \"kubernetes.io/projected/6ed91194-b05f-42a5-a8cd-6a94299b2b01-kube-api-access-48lr4\") pod \"migrator-59844c95c7-66qs6\" (UID: \"6ed91194-b05f-42a5-a8cd-6a94299b2b01\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288044 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0101a1a3-cd94-49dc-8a5b-9876927cdfab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288180 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288199 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-srv-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" 
(UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288217 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89w2c\" (UniqueName: \"kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288232 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288251 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288279 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mh49\" (UniqueName: \"kubernetes.io/projected/ead72a63-32ff-4a6c-b371-6c03adc8015b-kube-api-access-7mh49\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288302 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btxrb\" (UniqueName: \"kubernetes.io/projected/ec4dbb28-5584-44b3-9b23-6e9f811f546d-kube-api-access-btxrb\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288317 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-auth-proxy-config\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288332 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/204ab34c-de60-427e-be71-d44d8461b8b6-proxy-tls\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288349 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4bfg\" (UniqueName: \"kubernetes.io/projected/346a0026-3d33-4d35-9a7d-a622071ce541-kube-api-access-t4bfg\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288383 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzzm4\" (UniqueName: \"kubernetes.io/projected/2ecce101-e9fe-4612-8306-94165bb43460-kube-api-access-wzzm4\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288399 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92990496-8dbe-46ae-8ed1-e9820e8d8c83-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288416 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-registration-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288439 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-webhook-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288457 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/346a0026-3d33-4d35-9a7d-a622071ce541-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288487 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gwng\" (UniqueName: \"kubernetes.io/projected/929fa979-e34b-4512-baff-7d6a4ab601ce-kube-api-access-6gwng\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288514 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmgs9\" (UniqueName: \"kubernetes.io/projected/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-kube-api-access-dmgs9\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288534 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-cabundle\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288554 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac79c571-f4ba-4eee-850e-7c3ca6465535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288575 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf9d4794-e0c1-4200-912a-57e7c34d7250-metrics-tls\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288597 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288620 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-profile-collector-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288642 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-stats-auth\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288663 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30b52ed3-65f0-4673-84ae-14b5f07bed22-cert\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288679 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-config\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288697 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfzqp\" (UniqueName: \"kubernetes.io/projected/30b52ed3-65f0-4673-84ae-14b5f07bed22-kube-api-access-zfzqp\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.288717 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-service-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.289301 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-service-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.292925 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.293427 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.294546 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-srv-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.295145 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-csi-data-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.296392 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.297285 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-plugins-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.297750 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-cabundle\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.297839 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cad2fe8-0191-4762-a99c-5206030e7866-config\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.297888 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf9d4794-e0c1-4200-912a-57e7c34d7250-trusted-ca\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.298247 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c33ab196-8968-49ef-bc23-4b81e9f18d7a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.298400 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac79c571-f4ba-4eee-850e-7c3ca6465535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.298677 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92990496-8dbe-46ae-8ed1-e9820e8d8c83-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.298867 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.299124 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ecce101-e9fe-4612-8306-94165bb43460-service-ca-bundle\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.300442 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.8004211 +0000 UTC m=+137.657607879 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.300566 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.300595 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.302659 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0101a1a3-cd94-49dc-8a5b-9876927cdfab-proxy-tls\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.303793 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.304669 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-config\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.306663 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33ab196-8968-49ef-bc23-4b81e9f18d7a-config\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.307110 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.307885 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-auth-proxy-config\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.308147 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" event={"ID":"d04a9a55-4a32-4b67-8f29-c817d4ec71b9","Type":"ContainerStarted","Data":"84ebf01e13d226b1f1c75157c6ceacd42e2893f10b52089d27abdcb78fe11417"} Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.309029 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-config\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.309367 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-client\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.309965 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/204ab34c-de60-427e-be71-d44d8461b8b6-images\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.311830 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.313809 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-srv-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.314022 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-socket-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315269 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0101a1a3-cd94-49dc-8a5b-9876927cdfab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315316 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cad2fe8-0191-4762-a99c-5206030e7866-serving-cert\") pod 
\"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315432 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315463 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-mountpoint-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315526 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/929fa979-e34b-4512-baff-7d6a4ab601ce-registration-dir\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315541 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-node-bootstrap-token\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315319 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec4dbb28-5584-44b3-9b23-6e9f811f546d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.315774 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-stats-auth\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.316088 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/57196a6c-a8d4-4361-b282-3178b05ba6f4-etcd-ca\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.316773 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ead72a63-32ff-4a6c-b371-6c03adc8015b-signing-key\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.316841 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/346a0026-3d33-4d35-9a7d-a622071ce541-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.316931 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.317205 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.317667 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-default-certificate\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.317707 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92990496-8dbe-46ae-8ed1-e9820e8d8c83-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.330815 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ecce101-e9fe-4612-8306-94165bb43460-metrics-certs\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335227 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/204ab34c-de60-427e-be71-d44d8461b8b6-proxy-tls\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335246 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b596a0e4-953f-462f-a68d-58cdfdb6294b-metrics-tls\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335586 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf9d4794-e0c1-4200-912a-57e7c34d7250-metrics-tls\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335673 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/ac79c571-f4ba-4eee-850e-7c3ca6465535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335694 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d7f55ff9-695a-468d-8a5a-727c083ec754-profile-collector-cert\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.335875 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57196a6c-a8d4-4361-b282-3178b05ba6f4-serving-cert\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.336477 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/30b52ed3-65f0-4673-84ae-14b5f07bed22-cert\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.337061 4797 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.338770 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7ecb074-1cfe-433b-a183-12a3ddd85144-webhook-cert\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.341566 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73da5c10-f8a5-4d86-a730-d876d3629e14-certs\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.350675 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.351950 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kz2nv"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.353937 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.356775 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.367637 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.379350 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.389820 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.390338 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.890323303 +0000 UTC m=+137.747510012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.390557 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.391148 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-config-volume\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.396648 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.403977 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-metrics-tls\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.419114 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.477007 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sbn9\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.492136 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.493863 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:38.993847961 +0000 UTC m=+137.851034660 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.494676 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.510529 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvftr\" (UniqueName: \"kubernetes.io/projected/204ab34c-de60-427e-be71-d44d8461b8b6-kube-api-access-zvftr\") pod \"machine-config-operator-74547568cd-57ttr\" (UID: \"204ab34c-de60-427e-be71-d44d8461b8b6\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.514204 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.532624 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vg9x9"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.538161 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89w2c\" (UniqueName: \"kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c\") pod \"marketplace-operator-79b997595-q5lt7\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") " pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: W0104 11:50:38.543063 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbac1cac5_6f43_495f_9a7b_b5a1e13d5898.slice/crio-44b0427be3864e1fb8c306142f4aea92c94ef3d931a73b7665ffe79039524c6d WatchSource:0}: Error finding container 44b0427be3864e1fb8c306142f4aea92c94ef3d931a73b7665ffe79039524c6d: Status 404 returned error can't find the container with id 44b0427be3864e1fb8c306142f4aea92c94ef3d931a73b7665ffe79039524c6d Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.553757 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/92990496-8dbe-46ae-8ed1-e9820e8d8c83-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-hnsfb\" (UID: \"92990496-8dbe-46ae-8ed1-e9820e8d8c83\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.566184 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9kvs8"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.571263 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de9e561e-b3ce-4f63-9d4e-554e0ebd7037-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-7f52z\" (UID: \"de9e561e-b3ce-4f63-9d4e-554e0ebd7037\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.575223 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.589960 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gwng\" (UniqueName: \"kubernetes.io/projected/929fa979-e34b-4512-baff-7d6a4ab601ce-kube-api-access-6gwng\") pod \"csi-hostpathplugin-87bml\" (UID: \"929fa979-e34b-4512-baff-7d6a4ab601ce\") " pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: W0104 11:50:38.591289 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod107f145c_ad17_499f_ae0b_55c34e0d04e6.slice/crio-c36657fb764af1fc5abffe10789e50a823f8db57f85fd4acd0f45e723d13c008 WatchSource:0}: Error finding container c36657fb764af1fc5abffe10789e50a823f8db57f85fd4acd0f45e723d13c008: Status 404 returned error can't find the container with id c36657fb764af1fc5abffe10789e50a823f8db57f85fd4acd0f45e723d13c008 Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.593047 4797 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.593468 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.093452448 +0000 UTC m=+137.950639157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.610089 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmgs9\" (UniqueName: \"kubernetes.io/projected/14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd-kube-api-access-dmgs9\") pod \"olm-operator-6b444d44fb-kqg5w\" (UID: \"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.636811 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rww4\" (UniqueName: \"kubernetes.io/projected/7a889bd9-728a-4eb5-bc26-76dfa255ae2d-kube-api-access-4rww4\") pod \"multus-admission-controller-857f4d67dd-nl9mh\" (UID: \"7a889bd9-728a-4eb5-bc26-76dfa255ae2d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.653215 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkc66\" (UniqueName: \"kubernetes.io/projected/73da5c10-f8a5-4d86-a730-d876d3629e14-kube-api-access-nkc66\") pod \"machine-config-server-x2zb8\" (UID: \"73da5c10-f8a5-4d86-a730-d876d3629e14\") " pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.653434 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mff6w"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.655000 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.655324 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.665147 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.668030 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.670909 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8frgw\" (UniqueName: \"kubernetes.io/projected/e7ecb074-1cfe-433b-a183-12a3ddd85144-kube-api-access-8frgw\") pod \"packageserver-d55dfcdfc-dlg2p\" (UID: \"e7ecb074-1cfe-433b-a183-12a3ddd85144\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.674724 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:38 crc kubenswrapper[4797]: W0104 11:50:38.675120 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod230f3952_4d78_4975_9304_0ae7ee0ec87e.slice/crio-a7c6e40f99be76e1d28d9e403c155b0b5ed817744e6fcf5e9ea71f7e54eca365 WatchSource:0}: Error finding container a7c6e40f99be76e1d28d9e403c155b0b5ed817744e6fcf5e9ea71f7e54eca365: Status 404 returned error can't find the container with id a7c6e40f99be76e1d28d9e403c155b0b5ed817744e6fcf5e9ea71f7e54eca365 Jan 04 11:50:38 crc kubenswrapper[4797]: W0104 11:50:38.677522 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66a0e14f_90de_4516_9642_14c4e6bbf9a1.slice/crio-e4d7c2288d1b9d113c849a4d1268cc9e44c192ea7a019e2aa17e0023fa7fee5f WatchSource:0}: Error finding container e4d7c2288d1b9d113c849a4d1268cc9e44c192ea7a019e2aa17e0023fa7fee5f: Status 404 returned error can't find the container with id e4d7c2288d1b9d113c849a4d1268cc9e44c192ea7a019e2aa17e0023fa7fee5f Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.681926 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-x2zb8" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.691123 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66lv9\" (UniqueName: \"kubernetes.io/projected/57196a6c-a8d4-4361-b282-3178b05ba6f4-kube-api-access-66lv9\") pod \"etcd-operator-b45778765-4dcv7\" (UID: \"57196a6c-a8d4-4361-b282-3178b05ba6f4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.695142 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.696100 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.196086544 +0000 UTC m=+138.053273253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.710940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnqlm\" (UniqueName: \"kubernetes.io/projected/b596a0e4-953f-462f-a68d-58cdfdb6294b-kube-api-access-cnqlm\") pod \"dns-operator-744455d44c-gzw7t\" (UID: \"b596a0e4-953f-462f-a68d-58cdfdb6294b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.714805 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-87bml" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.717923 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.731932 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ht4f\" (UniqueName: \"kubernetes.io/projected/2cad2fe8-0191-4762-a99c-5206030e7866-kube-api-access-2ht4f\") pod \"service-ca-operator-777779d784-vs78p\" (UID: \"2cad2fe8-0191-4762-a99c-5206030e7866\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.754418 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c33ab196-8968-49ef-bc23-4b81e9f18d7a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xmqdc\" (UID: \"c33ab196-8968-49ef-bc23-4b81e9f18d7a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.770711 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.784255 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.787860 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.791870 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.792853 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.793156 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pj8fz"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.794448 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.797429 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.797651 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.2975959 +0000 UTC m=+138.154782619 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.797712 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.798180 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.298166775 +0000 UTC m=+138.155353484 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.798651 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.799386 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.812578 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx8wp\" (UniqueName: \"kubernetes.io/projected/d7f55ff9-695a-468d-8a5a-727c083ec754-kube-api-access-kx8wp\") pod \"catalog-operator-68c6474976-pd6pc\" (UID: \"d7f55ff9-695a-468d-8a5a-727c083ec754\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.830816 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj7n5\" (UniqueName: \"kubernetes.io/projected/0101a1a3-cd94-49dc-8a5b-9876927cdfab-kube-api-access-lj7n5\") pod \"machine-config-controller-84d6567774-nk574\" (UID: \"0101a1a3-cd94-49dc-8a5b-9876927cdfab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.856426 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.868678 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mhvp5"] Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.868746 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4bfg\" (UniqueName: \"kubernetes.io/projected/346a0026-3d33-4d35-9a7d-a622071ce541-kube-api-access-t4bfg\") pod \"package-server-manager-789f6589d5-64twc\" (UID: \"346a0026-3d33-4d35-9a7d-a622071ce541\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.869237 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.871916 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btxrb\" (UniqueName: \"kubernetes.io/projected/ec4dbb28-5584-44b3-9b23-6e9f811f546d-kube-api-access-btxrb\") pod \"control-plane-machine-set-operator-78cbb6b69f-vn8mq\" (UID: \"ec4dbb28-5584-44b3-9b23-6e9f811f546d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.893079 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mh49\" (UniqueName: \"kubernetes.io/projected/ead72a63-32ff-4a6c-b371-6c03adc8015b-kube-api-access-7mh49\") pod \"service-ca-9c57cc56f-l8qm2\" (UID: \"ead72a63-32ff-4a6c-b371-6c03adc8015b\") " pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.899098 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.899292 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.399265161 +0000 UTC m=+138.256451870 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.899349 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:38 crc kubenswrapper[4797]: E0104 11:50:38.899809 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.399793955 +0000 UTC m=+138.256980664 (durationBeforeRetry 500ms). 
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.904850 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.909642 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nklsc\" (UniqueName: \"kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc\") pod \"collect-profiles-29458785-rwnbb\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.924781 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.928438 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.942867 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lmn6\" (UniqueName: \"kubernetes.io/projected/ac79c571-f4ba-4eee-850e-7c3ca6465535-kube-api-access-2lmn6\") pod \"kube-storage-version-migrator-operator-b67b599dd-d2pjn\" (UID: \"ac79c571-f4ba-4eee-850e-7c3ca6465535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.948980 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.951014 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92m8h\" (UniqueName: \"kubernetes.io/projected/bf9d4794-e0c1-4200-912a-57e7c34d7250-kube-api-access-92m8h\") pod \"ingress-operator-5b745b69d9-bsz5f\" (UID: \"bf9d4794-e0c1-4200-912a-57e7c34d7250\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.962055 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.965226 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"]
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.969714 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzzm4\" (UniqueName: \"kubernetes.io/projected/2ecce101-e9fe-4612-8306-94165bb43460-kube-api-access-wzzm4\") pod \"router-default-5444994796-zdnhw\" (UID: \"2ecce101-e9fe-4612-8306-94165bb43460\") " pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:38 crc kubenswrapper[4797]: I0104 11:50:38.991960 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdjhw\" (UniqueName: \"kubernetes.io/projected/3e5aef0e-7435-41ec-911a-64f5e9b1ff58-kube-api-access-zdjhw\") pod \"dns-default-25vnt\" (UID: \"3e5aef0e-7435-41ec-911a-64f5e9b1ff58\") " pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:38 crc kubenswrapper[4797]: W0104 11:50:38.993863 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dcb99cf_0f3c_4288_bcee_937ef73461ce.slice/crio-2f685321bef4a9a003a7eea31fb749d1b4a23140fc830f3f1233a8709cb079f1 WatchSource:0}: Error finding container 2f685321bef4a9a003a7eea31fb749d1b4a23140fc830f3f1233a8709cb079f1: Status 404 returned error can't find the container with id 2f685321bef4a9a003a7eea31fb749d1b4a23140fc830f3f1233a8709cb079f1
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.000953 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.001444 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.501424244 +0000 UTC m=+138.358610953 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.014232 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48lr4\" (UniqueName: \"kubernetes.io/projected/6ed91194-b05f-42a5-a8cd-6a94299b2b01-kube-api-access-48lr4\") pod \"migrator-59844c95c7-66qs6\" (UID: \"6ed91194-b05f-42a5-a8cd-6a94299b2b01\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.023004 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.030922 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42fsr\" (UniqueName: \"kubernetes.io/projected/e639d5c0-ac9a-4af9-81a0-a4932eb5e197-kube-api-access-42fsr\") pod \"cluster-image-registry-operator-dc59b4c8b-hmdpk\" (UID: \"e639d5c0-ac9a-4af9-81a0-a4932eb5e197\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.051245 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfzqp\" (UniqueName: \"kubernetes.io/projected/30b52ed3-65f0-4673-84ae-14b5f07bed22-kube-api-access-zfzqp\") pod \"ingress-canary-9lgj6\" (UID: \"30b52ed3-65f0-4673-84ae-14b5f07bed22\") " pod="openshift-ingress-canary/ingress-canary-9lgj6"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.082591 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.103350 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.103635 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.603623199 +0000 UTC m=+138.460809908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.120680 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb"]
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.129959 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"]
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.130255 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.143318 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.143877 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87bml"]
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.155451 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.176966 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.184980 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.195297 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zdnhw" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.206875 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.207221 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.707200959 +0000 UTC m=+138.564387668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.239714 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.292112 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9lgj6" Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.308564 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.308928 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.808913191 +0000 UTC m=+138.666099910 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.313493 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.329083 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-x2zb8" event={"ID":"73da5c10-f8a5-4d86-a730-d876d3629e14","Type":"ContainerStarted","Data":"d2af9092adccab7022a936f1c21eab2a16f4f8fd00afe84f4478ede5a89ea0e9"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.331345 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" event={"ID":"7dcb99cf-0f3c-4288-bcee-937ef73461ce","Type":"ContainerStarted","Data":"2f685321bef4a9a003a7eea31fb749d1b4a23140fc830f3f1233a8709cb079f1"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.336133 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" event={"ID":"81081cdd-1c50-48d9-b176-832427d3ce5f","Type":"ContainerStarted","Data":"1ab4b93bf5130f594e47e5aa743bfffce7dfc8001cb1c318140a8a3b14a3ca64"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.341275 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" event={"ID":"0d05b9b3-d6d6-4fcc-9291-1ffac489c644","Type":"ContainerStarted","Data":"2b0605bf2adafdab73b5e6a765ce6e0bf12bc3738b707b006e4bee328854b68d"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.356888 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vs78p"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.361422 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wftnf" event={"ID":"fad36179-c797-4ea1-b751-9cf83b762fef","Type":"ContainerStarted","Data":"59961083ac3094599d8cd29bd856e30cb8aa46004707f58a7f588ee7abad0ba5"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.367819 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" event={"ID":"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39","Type":"ContainerStarted","Data":"40a60f6ec432f29e01dc2c711f48cff255829dfaff598c3f0cfd7b361ba9a400"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.367844 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" event={"ID":"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39","Type":"ContainerStarted","Data":"173a2beae14788f000b4c3c143fb3c1ad3d272ee17f62b4e1dfd22a3b9913244"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.370239 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vg9x9" event={"ID":"bac1cac5-6f43-495f-9a7b-b5a1e13d5898","Type":"ContainerStarted","Data":"283117a93cbec0972940d90e6b45bc682d904b2ed9d718dffcf2f0b46f1aa5b6"} Jan 04 
11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.370262 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vg9x9" event={"ID":"bac1cac5-6f43-495f-9a7b-b5a1e13d5898","Type":"ContainerStarted","Data":"44b0427be3864e1fb8c306142f4aea92c94ef3d931a73b7665ffe79039524c6d"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.371349 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" event={"ID":"d04a9a55-4a32-4b67-8f29-c817d4ec71b9","Type":"ContainerStarted","Data":"3dc3cace0289375675ca254fbe34e37df308e75c984ab0daa7af4e2736a339f3"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.371385 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" event={"ID":"d04a9a55-4a32-4b67-8f29-c817d4ec71b9","Type":"ContainerStarted","Data":"7026a44fea126316192c7735be1c30d27a01a75bcd583ffe7c6e1e15f61a6edb"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.372556 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" event={"ID":"d7c0be23-efaf-490e-a685-d3cf605dd7ac","Type":"ContainerStarted","Data":"85e7185704b96a27e7ceacc216729e4f1a6d90bf9ede1ad1fa3f8f1f2779b409"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.373669 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" event={"ID":"107f145c-ad17-499f-ae0b-55c34e0d04e6","Type":"ContainerStarted","Data":"c36657fb764af1fc5abffe10789e50a823f8db57f85fd4acd0f45e723d13c008"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.374450 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" event={"ID":"66a0e14f-90de-4516-9642-14c4e6bbf9a1","Type":"ContainerStarted","Data":"e4d7c2288d1b9d113c849a4d1268cc9e44c192ea7a019e2aa17e0023fa7fee5f"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.376701 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" event={"ID":"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd","Type":"ContainerStarted","Data":"d7a5276e94a49e3a01517f0553c38815090dcdb07147fc4c264e1d8a27e07384"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.379449 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" event={"ID":"c62ca291-d5b1-4491-94d5-a6018ead98bc","Type":"ContainerStarted","Data":"109ec286c75c35a7a0ce9f671d43d8c64a06530601cfb94ece01262607f98c78"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.380802 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" event={"ID":"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0","Type":"ContainerStarted","Data":"e4c43fd65255ca0bdf279b0e777cfd6c08b4c13120719822924ded12c0bfa312"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.383598 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87bml" event={"ID":"929fa979-e34b-4512-baff-7d6a4ab601ce","Type":"ContainerStarted","Data":"19570fb01c36d2afa48eb1010d14dedc4ec96a968cfe575f49983a5d42593b59"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.387131 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" event={"ID":"c540aa4c-7310-40c0-b929-11b5d21e59fb","Type":"ContainerStarted","Data":"509e7292a35ffd8308898164b3a2850540bf2bf4211e525411e975ee6d7195ab"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.390835 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" event={"ID":"92990496-8dbe-46ae-8ed1-e9820e8d8c83","Type":"ContainerStarted","Data":"49edc79be5a1e822f33bfae8d0828a8e3de31dbc5becb37a0e52b362ccb0c006"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.391269 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4dcv7"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.392495 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" event={"ID":"7db1463e-4658-4160-bdf2-76b60a23c85e","Type":"ContainerStarted","Data":"1a2d3828ebb39de4805facf5fc258ffb7ab30b758f9293e834af38db5eebd3ee"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.392539 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" event={"ID":"7db1463e-4658-4160-bdf2-76b60a23c85e","Type":"ContainerStarted","Data":"0052f0daf3c8399e2de436f3e0ca3c3cd9b99b9e34e6d5f773125ffa32839f8c"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.394510 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mff6w" event={"ID":"230f3952-4d78-4975-9304-0ae7ee0ec87e","Type":"ContainerStarted","Data":"a7c6e40f99be76e1d28d9e403c155b0b5ed817744e6fcf5e9ea71f7e54eca365"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.396345 4797 generic.go:334] "Generic (PLEG): container finished" podID="48f0c0da-d926-4b80-bacf-6dcfd2298456" containerID="a1cafb7e09e8a339c012a529d18814048a31ab1d73a35889d7442524c0450d21" exitCode=0 Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.396390 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" event={"ID":"48f0c0da-d926-4b80-bacf-6dcfd2298456","Type":"ContainerDied","Data":"a1cafb7e09e8a339c012a529d18814048a31ab1d73a35889d7442524c0450d21"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.396406 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" event={"ID":"48f0c0da-d926-4b80-bacf-6dcfd2298456","Type":"ContainerStarted","Data":"3723a5b7ba5d74c266697921ce19d474b01d6813e70ca528da2b6e50a3f702c4"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.404717 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" event={"ID":"204ab34c-de60-427e-be71-d44d8461b8b6","Type":"ContainerStarted","Data":"d117ee62650298e0af6e44dc1a658ff9681e6bdac6fa9720b2cff099e481f56b"} Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.409808 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.410957 4797 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:39.910936671 +0000 UTC m=+138.768123380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: W0104 11:50:39.456811 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde9e561e_b3ce_4f63_9d4e_554e0ebd7037.slice/crio-9e5ccb3be403e3da5ae30626389a96a3a0a0a831c7f94ff1f40efb03272e4f69 WatchSource:0}: Error finding container 9e5ccb3be403e3da5ae30626389a96a3a0a0a831c7f94ff1f40efb03272e4f69: Status 404 returned error can't find the container with id 9e5ccb3be403e3da5ae30626389a96a3a0a0a831c7f94ff1f40efb03272e4f69 Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.470510 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.514520 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.515812 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.015789865 +0000 UTC m=+138.872976634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.615500 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.615762 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-04 11:50:40.115736071 +0000 UTC m=+138.972922790 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.616154 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.616558 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.116543682 +0000 UTC m=+138.973730391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.682269 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gzw7t"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.694048 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc"] Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.719242 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.719407 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.219354452 +0000 UTC m=+139.076541161 (durationBeforeRetry 500ms). 
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.719605 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.719905 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.219893016 +0000 UTC m=+139.077079725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.820603 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.820790 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.320762986 +0000 UTC m=+139.177949695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.820959 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.821312 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.32130018 +0000 UTC m=+139.178486889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.922341 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:39 crc kubenswrapper[4797]: E0104 11:50:39.922768 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.422612691 +0000 UTC m=+139.279799400 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:39 crc kubenswrapper[4797]: I0104 11:50:39.933530 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"]
Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.023723 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.024096 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.524081167 +0000 UTC m=+139.381267886 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.024789 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"]
Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.068666 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-25vnt"]
Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.093487 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc33ab196_8968_49ef_bc23_4b81e9f18d7a.slice/crio-d8774caa8dfcd0749cae4742866204aa08cb9f92136374b8a0cf9fa5c9e600d0 WatchSource:0}: Error finding container d8774caa8dfcd0749cae4742866204aa08cb9f92136374b8a0cf9fa5c9e600d0: Status 404 returned error can't find the container with id d8774caa8dfcd0749cae4742866204aa08cb9f92136374b8a0cf9fa5c9e600d0
Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.097023 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l8qm2"]
Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.099319 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podead72a63_32ff_4a6c_b371_6c03adc8015b.slice/crio-c3d4be61ea8aebb0a5f31caf781f64c3e50d37d8bc7d2a01882f935221159e3a WatchSource:0}: Error finding container c3d4be61ea8aebb0a5f31caf781f64c3e50d37d8bc7d2a01882f935221159e3a: Status 404 returned error can't find the container with id c3d4be61ea8aebb0a5f31caf781f64c3e50d37d8bc7d2a01882f935221159e3a
kubenswrapper[4797]: I0104 11:50:40.099333 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.104334 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.124598 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.125003 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.624971947 +0000 UTC m=+139.482158656 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.131143 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.139589 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9lgj6"] Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.148666 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a889bd9_728a_4eb5_bc26_76dfa255ae2d.slice/crio-2c90c84e23f4cf896ebaaa98cc69be2a667ea6bdaa28f1365c2002ddca907bfb WatchSource:0}: Error finding container 2c90c84e23f4cf896ebaaa98cc69be2a667ea6bdaa28f1365c2002ddca907bfb: Status 404 returned error can't find the container with id 2c90c84e23f4cf896ebaaa98cc69be2a667ea6bdaa28f1365c2002ddca907bfb Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.152798 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7f55ff9_695a_468d_8a5a_727c083ec754.slice/crio-e89bcaad4be09df29f76ac1d75d411f34f8bc8f3d35e89f5d1f80968d80d9adb WatchSource:0}: Error finding container e89bcaad4be09df29f76ac1d75d411f34f8bc8f3d35e89f5d1f80968d80d9adb: Status 404 returned error can't find the container with id e89bcaad4be09df29f76ac1d75d411f34f8bc8f3d35e89f5d1f80968d80d9adb Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.157096 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.175950 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" podStartSLOduration=119.175933501 podStartE2EDuration="1m59.175933501s" 
podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.171909156 +0000 UTC m=+139.029095865" watchObservedRunningTime="2026-01-04 11:50:40.175933501 +0000 UTC m=+139.033120210" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.225575 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.226179 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.726165955 +0000 UTC m=+139.583352664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.245925 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec4dbb28_5584_44b3_9b23_6e9f811f546d.slice/crio-874177305dc96fd8570091cb0f60fff30ba585f4693e27dff7fda3481c9b36be WatchSource:0}: Error finding container 874177305dc96fd8570091cb0f60fff30ba585f4693e27dff7fda3481c9b36be: Status 404 returned error can't find the container with id 874177305dc96fd8570091cb0f60fff30ba585f4693e27dff7fda3481c9b36be Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.327725 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.328498 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.828474713 +0000 UTC m=+139.685661422 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.356923 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.393316 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-kz2nv" podStartSLOduration=119.393289789 podStartE2EDuration="1m59.393289789s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.380591537 +0000 UTC m=+139.237778246" watchObservedRunningTime="2026-01-04 11:50:40.393289789 +0000 UTC m=+139.250476508" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.432686 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.432960 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:40.932949347 +0000 UTC m=+139.790136056 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.440743 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" event={"ID":"2cad2fe8-0191-4762-a99c-5206030e7866","Type":"ContainerStarted","Data":"7f13019d7837a8961d260db46bc5fc4daac19b97c674e2ea67377929350a7fb4"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.441859 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" event={"ID":"c33ab196-8968-49ef-bc23-4b81e9f18d7a","Type":"ContainerStarted","Data":"d8774caa8dfcd0749cae4742866204aa08cb9f92136374b8a0cf9fa5c9e600d0"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.442557 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" event={"ID":"e639d5c0-ac9a-4af9-81a0-a4932eb5e197","Type":"ContainerStarted","Data":"d7afb7aa66cf9e798d4501cd9561cc7a3c01f41f44e1ef78db0d2996fe9caec1"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.443882 4797 generic.go:334] "Generic (PLEG): container finished" podID="c540aa4c-7310-40c0-b929-11b5d21e59fb" containerID="afb075e77618eaabc585ba5f0f6e851ac60de1b8edfb20eb2d8306ce023e9c3c" exitCode=0 Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.443943 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" event={"ID":"c540aa4c-7310-40c0-b929-11b5d21e59fb","Type":"ContainerDied","Data":"afb075e77618eaabc585ba5f0f6e851ac60de1b8edfb20eb2d8306ce023e9c3c"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.447119 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" event={"ID":"ead72a63-32ff-4a6c-b371-6c03adc8015b","Type":"ContainerStarted","Data":"c3d4be61ea8aebb0a5f31caf781f64c3e50d37d8bc7d2a01882f935221159e3a"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.450837 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" event={"ID":"7a889bd9-728a-4eb5-bc26-76dfa255ae2d","Type":"ContainerStarted","Data":"2c90c84e23f4cf896ebaaa98cc69be2a667ea6bdaa28f1365c2002ddca907bfb"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.461629 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" event={"ID":"57196a6c-a8d4-4361-b282-3178b05ba6f4","Type":"ContainerStarted","Data":"5d4ca99152ded09565633c4ace3a42a8dcc989f35d03946cd5c42d8189ccc126"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.506329 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" event={"ID":"81081cdd-1c50-48d9-b176-832427d3ce5f","Type":"ContainerStarted","Data":"5639c35657bb1691897ef570575f46e5d2aef7465661320e9942e9edd9522b76"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.508205 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.513541 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-25vnt" event={"ID":"3e5aef0e-7435-41ec-911a-64f5e9b1ff58","Type":"ContainerStarted","Data":"c8a0466d4f88b65f8b74976148ac741bbc849b2dbdef46ce4844f7646c58cb02"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.522486 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" event={"ID":"346a0026-3d33-4d35-9a7d-a622071ce541","Type":"ContainerStarted","Data":"e3c9076641475b1dc6ad422571c7f9a1b670322b5797eedcd5a98d950453d7b4"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.527790 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" event={"ID":"d7f55ff9-695a-468d-8a5a-727c083ec754","Type":"ContainerStarted","Data":"e89bcaad4be09df29f76ac1d75d411f34f8bc8f3d35e89f5d1f80968d80d9adb"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.565744 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-nk574"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.566327 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.575330 4797 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6vf5r container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.575393 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.575949 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.075932598 +0000 UTC m=+139.933119297 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.600864 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" event={"ID":"ec4dbb28-5584-44b3-9b23-6e9f811f546d","Type":"ContainerStarted","Data":"874177305dc96fd8570091cb0f60fff30ba585f4693e27dff7fda3481c9b36be"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.610018 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.635152 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn"] Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.643453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" event={"ID":"7dcb99cf-0f3c-4288-bcee-937ef73461ce","Type":"ContainerStarted","Data":"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.643781 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.666473 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" event={"ID":"e7ecb074-1cfe-433b-a183-12a3ddd85144","Type":"ContainerStarted","Data":"52d3b3e1175af3e7db50b2db0b73de613aa83889609fb774cc7caa91fc73dc05"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.685369 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.685667 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.1856538 +0000 UTC m=+140.042840509 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: W0104 11:50:40.688080 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ed91194_b05f_42a5_a8cd_6a94299b2b01.slice/crio-88a3bd85a5bb03bce3b38a6350b20dcaa658e4d5d10d6b9369bca9cdf17bcb9f WatchSource:0}: Error finding container 88a3bd85a5bb03bce3b38a6350b20dcaa658e4d5d10d6b9369bca9cdf17bcb9f: Status 404 returned error can't find the container with id 88a3bd85a5bb03bce3b38a6350b20dcaa658e4d5d10d6b9369bca9cdf17bcb9f Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.689459 4797 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-q5lt7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.689494 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.696063 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wftnf" event={"ID":"fad36179-c797-4ea1-b751-9cf83b762fef","Type":"ContainerStarted","Data":"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.717166 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" event={"ID":"de9e561e-b3ce-4f63-9d4e-554e0ebd7037","Type":"ContainerStarted","Data":"9e5ccb3be403e3da5ae30626389a96a3a0a0a831c7f94ff1f40efb03272e4f69"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.730046 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mff6w" event={"ID":"230f3952-4d78-4975-9304-0ae7ee0ec87e","Type":"ContainerStarted","Data":"8ce1a0036c6ee34d97659ffc6a8139e871d7dcebef245addfd31544996d08ab0"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.730259 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mff6w" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.731960 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" event={"ID":"b596a0e4-953f-462f-a68d-58cdfdb6294b","Type":"ContainerStarted","Data":"d7d0a5aeb74d0c214e1458ecb6360307e9cff29440f1d5481d5c6be61d38e280"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.740002 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" 
event={"ID":"14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd","Type":"ContainerStarted","Data":"7b84c39329a2da63e10a5684524db277c894d4f54511115fac53367996069dae"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.740818 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.755333 4797 patch_prober.go:28] interesting pod/console-operator-58897d9998-mff6w container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.755379 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-mff6w" podUID="230f3952-4d78-4975-9304-0ae7ee0ec87e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.759805 4797 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-kqg5w container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.759851 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" podUID="14ac7e0f-adbc-4ae9-a6e1-cbb483365cbd" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.763266 4797 generic.go:334] "Generic (PLEG): container finished" podID="107f145c-ad17-499f-ae0b-55c34e0d04e6" containerID="165f88932b5031bf124eacf89fafe93fd4e166142124824ac9562020107b8a43" exitCode=0 Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.763448 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" event={"ID":"107f145c-ad17-499f-ae0b-55c34e0d04e6","Type":"ContainerDied","Data":"165f88932b5031bf124eacf89fafe93fd4e166142124824ac9562020107b8a43"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.777041 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-x2zb8" event={"ID":"73da5c10-f8a5-4d86-a730-d876d3629e14","Type":"ContainerStarted","Data":"33884fe789091c6c5bc3881c481c0b4256a5c847ee17f6be99ad2727df3425d1"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.792276 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.793455 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-04 11:50:41.2934356 +0000 UTC m=+140.150622309 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.801794 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" event={"ID":"204ab34c-de60-427e-be71-d44d8461b8b6","Type":"ContainerStarted","Data":"ff90e4bbb0937834ae92404f536085f4ffbf12a3b58c891c6a510246e8006139"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.813593 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-x2zb8" podStartSLOduration=4.813577928 podStartE2EDuration="4.813577928s" podCreationTimestamp="2026-01-04 11:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.812634123 +0000 UTC m=+139.669820832" watchObservedRunningTime="2026-01-04 11:50:40.813577928 +0000 UTC m=+139.670764637" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.819486 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9lgj6" event={"ID":"30b52ed3-65f0-4673-84ae-14b5f07bed22","Type":"ContainerStarted","Data":"5d858134cdacb60d2cf28961ab3be45cfd0cb923f048fccfbb0b6c3a2f91e70a"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.830513 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" event={"ID":"bf9d4794-e0c1-4200-912a-57e7c34d7250","Type":"ContainerStarted","Data":"5b371b1f3c75f9c2c29db53b18d71c6313898a253e872b639c1dec1e4b071a9a"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.842066 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" event={"ID":"0d05b9b3-d6d6-4fcc-9291-1ffac489c644","Type":"ContainerStarted","Data":"043ed33632cf81848cf000b3ced946c7e2d7b83a889d0c028e2fda9ade6af2bd"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.857650 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w" podStartSLOduration=119.85763418 podStartE2EDuration="1m59.85763418s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.856180432 +0000 UTC m=+139.713367161" watchObservedRunningTime="2026-01-04 11:50:40.85763418 +0000 UTC m=+139.714820889" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.868422 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" event={"ID":"c62ca291-d5b1-4491-94d5-a6018ead98bc","Type":"ContainerStarted","Data":"8db855fcf97b3fabc0e596ab042c51e8f255e67c69566770d435518dfa5d775d"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.879029 4797 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" event={"ID":"66a0e14f-90de-4516-9642-14c4e6bbf9a1","Type":"ContainerStarted","Data":"780b6436b6724b43a7c57ddd8774aa89805a5f5085de35ce9beab7539d0e2954"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.893790 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.895301 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.395286376 +0000 UTC m=+140.252473085 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.905927 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" event={"ID":"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0","Type":"ContainerStarted","Data":"258b05a7cba49800d73d805c1a69040f30359db0e2c763a584aefa8ce617c66b"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.906642 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.943090 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" podStartSLOduration=119.943074897 podStartE2EDuration="1m59.943074897s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.897198956 +0000 UTC m=+139.754385665" watchObservedRunningTime="2026-01-04 11:50:40.943074897 +0000 UTC m=+139.800261606" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.960809 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" event={"ID":"d7c0be23-efaf-490e-a685-d3cf605dd7ac","Type":"ContainerStarted","Data":"0f71ef3e975553151c254518ebbd283e63eec72c5d8eaafad7f0217f35a1c885"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.977487 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zdnhw" event={"ID":"2ecce101-e9fe-4612-8306-94165bb43460","Type":"ContainerStarted","Data":"1b353368e9f430586b100f6196141ae6e5422b26443eaa21fe19b947132fde5b"} Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.978833 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:40 crc 
kubenswrapper[4797]: I0104 11:50:40.978855 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vg9x9" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.992133 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wftnf" podStartSLOduration=119.99211167 podStartE2EDuration="1m59.99211167s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:40.991792631 +0000 UTC m=+139.848979340" watchObservedRunningTime="2026-01-04 11:50:40.99211167 +0000 UTC m=+139.849298379" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.998432 4797 patch_prober.go:28] interesting pod/downloads-7954f5f757-vg9x9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.998474 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vg9x9" podUID="bac1cac5-6f43-495f-9a7b-b5a1e13d5898" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.998505 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:40 crc kubenswrapper[4797]: E0104 11:50:40.998709 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.498673231 +0000 UTC m=+140.355859940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:40 crc kubenswrapper[4797]: I0104 11:50:40.999161 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.002947 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.502915362 +0000 UTC m=+140.360102141 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.025356 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mff6w" podStartSLOduration=120.025340029 podStartE2EDuration="2m0.025340029s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.023394088 +0000 UTC m=+139.880580797" watchObservedRunningTime="2026-01-04 11:50:41.025340029 +0000 UTC m=+139.882526738" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.045489 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.099766 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.100939 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.600924397 +0000 UTC m=+140.458111096 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.149504 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podStartSLOduration=120.149482858 podStartE2EDuration="2m0.149482858s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.126505567 +0000 UTC m=+139.983692276" watchObservedRunningTime="2026-01-04 11:50:41.149482858 +0000 UTC m=+140.006669567" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.201686 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.202026 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.702013963 +0000 UTC m=+140.559200672 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.300090 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" podStartSLOduration=120.300070639 podStartE2EDuration="2m0.300070639s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.292622004 +0000 UTC m=+140.149808713" watchObservedRunningTime="2026-01-04 11:50:41.300070639 +0000 UTC m=+140.157257348" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.304682 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.305023 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.805005998 +0000 UTC m=+140.662192707 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.410306 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gcbc4" podStartSLOduration=120.410291083 podStartE2EDuration="2m0.410291083s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.33793146 +0000 UTC m=+140.195118169" watchObservedRunningTime="2026-01-04 11:50:41.410291083 +0000 UTC m=+140.267477792" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.410753 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.411040 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:41.911028923 +0000 UTC m=+140.768215632 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.464925 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-vg9x9" podStartSLOduration=120.464911753 podStartE2EDuration="2m0.464911753s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.440261688 +0000 UTC m=+140.297448397" watchObservedRunningTime="2026-01-04 11:50:41.464911753 +0000 UTC m=+140.322098462" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.465363 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tlvtx" podStartSLOduration=120.465358934 podStartE2EDuration="2m0.465358934s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.463238439 +0000 UTC m=+140.320425148" watchObservedRunningTime="2026-01-04 11:50:41.465358934 +0000 UTC m=+140.322545643" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.502673 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-726np" podStartSLOduration=120.502657881 podStartE2EDuration="2m0.502657881s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:41.481739013 +0000 UTC m=+140.338925722" watchObservedRunningTime="2026-01-04 11:50:41.502657881 +0000 UTC m=+140.359844590" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.516683 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.517050 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.017032867 +0000 UTC m=+140.874219576 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.620020 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.620490 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.120479314 +0000 UTC m=+140.977666013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.663841 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.721646 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.722812 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.222012551 +0000 UTC m=+141.079199260 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.740696 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.741059 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.241049089 +0000 UTC m=+141.098235788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.845417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.845764 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.345742959 +0000 UTC m=+141.202929658 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:41 crc kubenswrapper[4797]: I0104 11:50:41.947053 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:41 crc kubenswrapper[4797]: E0104 11:50:41.947518 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.447508042 +0000 UTC m=+141.304694751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.050784 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.051164 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.551146714 +0000 UTC m=+141.408333423 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.153398 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.153696 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.653680787 +0000 UTC m=+141.510867496 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.183271 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" event={"ID":"b596a0e4-953f-462f-a68d-58cdfdb6294b","Type":"ContainerStarted","Data":"f528138acbeab95c7b7d67e9ab9c64815992785086d30b787997d9b49ae620d2"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.254365 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.254722 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.754704051 +0000 UTC m=+141.611890760 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.266462 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zdnhw" event={"ID":"2ecce101-e9fe-4612-8306-94165bb43460","Type":"ContainerStarted","Data":"dd53c1efd5f31f1f76321a7baf99d19b95e80b2f6815c87c8c7c5612cd3ba6a3"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.299349 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" event={"ID":"de9e561e-b3ce-4f63-9d4e-554e0ebd7037","Type":"ContainerStarted","Data":"40df7e6f0752b21d159285f1d67439422f4eac4346969a310a3e40e830b6381b"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.313793 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zdnhw" podStartSLOduration=121.313778627 podStartE2EDuration="2m1.313778627s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.312602026 +0000 UTC m=+141.169788745" watchObservedRunningTime="2026-01-04 11:50:42.313778627 +0000 UTC m=+141.170965326"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.327853 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" event={"ID":"57196a6c-a8d4-4361-b282-3178b05ba6f4","Type":"ContainerStarted","Data":"9f8e3f82a3841d8c884f060f6a01e88b73d570c84042c24344235bfcf9fd9111"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.334895 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" event={"ID":"ead72a63-32ff-4a6c-b371-6c03adc8015b","Type":"ContainerStarted","Data":"b3007011152083a0914ad2d227d80f06de78e5ee10a240375e506c6076d66ed3"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.355895 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.357730 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.857717256 +0000 UTC m=+141.714903965 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.381952 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9lgj6" event={"ID":"30b52ed3-65f0-4673-84ae-14b5f07bed22","Type":"ContainerStarted","Data":"d651eb11c47ec0ce08e70037703fe02482fb19209ced53aec34f8346a261453f"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.401061 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-7f52z" podStartSLOduration=121.40104131 podStartE2EDuration="2m1.40104131s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.373226932 +0000 UTC m=+141.230413641" watchObservedRunningTime="2026-01-04 11:50:42.40104131 +0000 UTC m=+141.258228029"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.420656 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" event={"ID":"6ed91194-b05f-42a5-a8cd-6a94299b2b01","Type":"ContainerStarted","Data":"88a3bd85a5bb03bce3b38a6350b20dcaa658e4d5d10d6b9369bca9cdf17bcb9f"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.457908 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.458816 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:42.958801342 +0000 UTC m=+141.815988051 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.476535 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-4dcv7" podStartSLOduration=121.476519676 podStartE2EDuration="2m1.476519676s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.473975839 +0000 UTC m=+141.331162548" watchObservedRunningTime="2026-01-04 11:50:42.476519676 +0000 UTC m=+141.333706385"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.529396 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" event={"ID":"2cad2fe8-0191-4762-a99c-5206030e7866","Type":"ContainerStarted","Data":"803832c1966ff6069bbeaad07b19292e7044baa612490edab7cdbf1ff923d476"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.559864 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.560658 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.060647147 +0000 UTC m=+141.917833856 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.594580 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" event={"ID":"c33ab196-8968-49ef-bc23-4b81e9f18d7a","Type":"ContainerStarted","Data":"a902d36aac1e3ec72158b2435aef145e75fbbfda8c2ce3d65cc8d95e6a8b85e6"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.596960 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-l8qm2" podStartSLOduration=121.596943597 podStartE2EDuration="2m1.596943597s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.595313214 +0000 UTC m=+141.452499923" watchObservedRunningTime="2026-01-04 11:50:42.596943597 +0000 UTC m=+141.454130306"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.630023 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" event={"ID":"e7ecb074-1cfe-433b-a183-12a3ddd85144","Type":"ContainerStarted","Data":"0de814f3f605990becab46b957dfb83434268dc36241ad0b7a108c7f592426d8"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.630952 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.638176 4797 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dlg2p container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body=
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.638225 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" podUID="e7ecb074-1cfe-433b-a183-12a3ddd85144" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.654945 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" event={"ID":"ec4dbb28-5584-44b3-9b23-6e9f811f546d","Type":"ContainerStarted","Data":"295925e5cdce6e36e6e1e49e3de44deccdd73226fbc0b051964ddda727f8cce3"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.661318 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.662307 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.162291357 +0000 UTC m=+142.019478056 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.704152 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" event={"ID":"d7f55ff9-695a-468d-8a5a-727c083ec754","Type":"ContainerStarted","Data":"5f10d027c273f8aa53bea9f5d1c5904456dda98aa1d2ef67dd3297122608794f"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.704858 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.717269 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.733552 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" event={"ID":"0101a1a3-cd94-49dc-8a5b-9876927cdfab","Type":"ContainerStarted","Data":"2054db043f1a9b94b3e1b5d383c63dfe2e9b28ccc72db7c0b902d61eddde7b18"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.766219 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.768428 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.268417465 +0000 UTC m=+142.125604174 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.769671 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9lgj6" podStartSLOduration=6.769656347 podStartE2EDuration="6.769656347s" podCreationTimestamp="2026-01-04 11:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.679026105 +0000 UTC m=+141.536212814" watchObservedRunningTime="2026-01-04 11:50:42.769656347 +0000 UTC m=+141.626843046"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.772125 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" event={"ID":"346a0026-3d33-4d35-9a7d-a622071ce541","Type":"ContainerStarted","Data":"c9c59b034b260f521ebc21b6fd2aa3fcfd630395b7685c5c9f5fcb080afecdcb"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.783467 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" event={"ID":"204ab34c-de60-427e-be71-d44d8461b8b6","Type":"ContainerStarted","Data":"baeaffafb5c00ce984983e664d7edb73be7ba6551bf4518fbe3d3fedbe3b7f5e"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.850550 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" event={"ID":"5993e116-1e4f-47ba-a301-47a026bdbf14","Type":"ContainerStarted","Data":"73aa9dc1c3659de68a540ffd50286ba57c684af80d97f6ed479da7a0d852167c"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.870337 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vs78p" podStartSLOduration=121.870322351 podStartE2EDuration="2m1.870322351s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.770420917 +0000 UTC m=+141.627607626" watchObservedRunningTime="2026-01-04 11:50:42.870322351 +0000 UTC m=+141.727509060"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.870495 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.871268 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xmqdc" podStartSLOduration=121.871262826 podStartE2EDuration="2m1.871262826s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.869996673 +0000 UTC m=+141.727183382" watchObservedRunningTime="2026-01-04 11:50:42.871262826 +0000 UTC m=+141.728449525"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.871481 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.371460741 +0000 UTC m=+142.228647450 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.899426 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" event={"ID":"48f0c0da-d926-4b80-bacf-6dcfd2298456","Type":"ContainerStarted","Data":"fd698a44f561f901162a000844f453ac167fa16e42d240d41d427fc46b759335"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.907481 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.922775 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vn8mq" podStartSLOduration=121.922759214 podStartE2EDuration="2m1.922759214s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.922350043 +0000 UTC m=+141.779536752" watchObservedRunningTime="2026-01-04 11:50:42.922759214 +0000 UTC m=+141.779945923"
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.981632 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" event={"ID":"92990496-8dbe-46ae-8ed1-e9820e8d8c83","Type":"ContainerStarted","Data":"f934aa6c677eb90f8e14bf5a7670afbd36f5f6bad65eff8191cfd1be1777f147"}
Jan 04 11:50:42 crc kubenswrapper[4797]: I0104 11:50:42.985410 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:42 crc kubenswrapper[4797]: E0104 11:50:42.987334 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.487322823 +0000 UTC m=+142.344509532 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.072122 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" event={"ID":"ac79c571-f4ba-4eee-850e-7c3ca6465535","Type":"ContainerStarted","Data":"46e46d92bf856f616195c4a579a3b6e005d7731e447dcde76c65086019b7a4c7"}
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.088641 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.089078 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.589059416 +0000 UTC m=+142.446246125 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.089567 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" podStartSLOduration=122.089551969 podStartE2EDuration="2m2.089551969s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:42.980291929 +0000 UTC m=+141.837478638" watchObservedRunningTime="2026-01-04 11:50:43.089551969 +0000 UTC m=+141.946738678"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.091075 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pd6pc" podStartSLOduration=122.091069528 podStartE2EDuration="2m2.091069528s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.088780378 +0000 UTC m=+141.945967087" watchObservedRunningTime="2026-01-04 11:50:43.091069528 +0000 UTC m=+141.948256237"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.132385 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" event={"ID":"bf9d4794-e0c1-4200-912a-57e7c34d7250","Type":"ContainerStarted","Data":"b7c4c8eb578b938616cf9194abac68c9cfea058215b2c4d830cb32d29b6f7008"}
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.151707 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" event={"ID":"0d05b9b3-d6d6-4fcc-9291-1ffac489c644","Type":"ContainerStarted","Data":"f3189af7dab4405b7019a48a4b3df4101871cb5ae857337b308fb66fe0e9c3e8"}
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.156402 4797 patch_prober.go:28] interesting pod/downloads-7954f5f757-vg9x9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.156444 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vg9x9" podUID="bac1cac5-6f43-495f-9a7b-b5a1e13d5898" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.162589 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.172089 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.181375 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kqg5w"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.197493 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mff6w"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.204289 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.212408 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.216964 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.716942453 +0000 UTC m=+142.574129162 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.239572 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:43 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:43 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:43 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.239839 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.240535 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-57ttr" podStartSLOduration=122.2405264 podStartE2EDuration="2m2.2405264s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.240399156 +0000 UTC m=+142.097585865" watchObservedRunningTime="2026-01-04 11:50:43.2405264 +0000 UTC m=+142.097713109"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.242266 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" podStartSLOduration=122.242261655 podStartE2EDuration="2m2.242261655s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.156726887 +0000 UTC m=+142.013913596" watchObservedRunningTime="2026-01-04 11:50:43.242261655 +0000 UTC m=+142.099448364"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.305142 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" podStartSLOduration=122.30512892 podStartE2EDuration="2m2.30512892s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.303361094 +0000 UTC m=+142.160547803" watchObservedRunningTime="2026-01-04 11:50:43.30512892 +0000 UTC m=+142.162315629"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.314558 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.314929 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.814912436 +0000 UTC m=+142.672099145 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.353363 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-hnsfb" podStartSLOduration=122.353347962 podStartE2EDuration="2m2.353347962s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.348075814 +0000 UTC m=+142.205262523" watchObservedRunningTime="2026-01-04 11:50:43.353347962 +0000 UTC m=+142.210534671"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.407348 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp" podStartSLOduration=122.407330255 podStartE2EDuration="2m2.407330255s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.405527548 +0000 UTC m=+142.262714257" watchObservedRunningTime="2026-01-04 11:50:43.407330255 +0000 UTC m=+142.264516964"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.418482 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.418846 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:43.918835396 +0000 UTC m=+142.776022095 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.467441 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-mhvp5" podStartSLOduration=122.467420897 podStartE2EDuration="2m2.467420897s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.465192059 +0000 UTC m=+142.322378768" watchObservedRunningTime="2026-01-04 11:50:43.467420897 +0000 UTC m=+142.324607606"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.517042 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lrkgj"]
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.518240 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.522129 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.523541 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.023526536 +0000 UTC m=+142.880713245 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.530340 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.535523 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" podStartSLOduration=122.535502599 podStartE2EDuration="2m2.535502599s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.51338615 +0000 UTC m=+142.370572859" watchObservedRunningTime="2026-01-04 11:50:43.535502599 +0000 UTC m=+142.392689308"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.548923 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lrkgj"]
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.625469 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.625563 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnht7\" (UniqueName: \"kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.625593 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.625637 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.625946 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.125934556 +0000 UTC m=+142.983121255 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.709667 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" podStartSLOduration=122.709648997 podStartE2EDuration="2m2.709648997s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:43.683115192 +0000 UTC m=+142.540301901" watchObservedRunningTime="2026-01-04 11:50:43.709648997 +0000 UTC m=+142.566835706"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.709971 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"]
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.710811 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.721961 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.726706 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.726872 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.726921 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnht7\" (UniqueName: \"kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.726975 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.727353 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.727450 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.227436222 +0000 UTC m=+143.084622931 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.727634 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.749417 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"]
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.790881 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnht7\" (UniqueName: \"kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7\") pod \"community-operators-lrkgj\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") " pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.829733 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.829956 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87cfg\" (UniqueName: \"kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.830002 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.830040 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.830332 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.330319615 +0000 UTC m=+143.187506324 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.872799 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.934516 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.934781 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.934812 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87cfg\" (UniqueName: \"kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.934842 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.935236 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.935472 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:43 crc kubenswrapper[4797]: E0104 11:50:43.935527 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.435513297 +0000 UTC m=+143.292700006 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.949285 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5b2cz"]
Jan 04 11:50:43 crc kubenswrapper[4797]: I0104 11:50:43.950187 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.037212 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87cfg\" (UniqueName: \"kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg\") pod \"certified-operators-2wl2p\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") " pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.038029 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8jtm\" (UniqueName: \"kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.038101 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.038134 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.038198 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.038473 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.538457412 +0000 UTC m=+143.395644121 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.055321 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.057115 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5b2cz"]
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.103772 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"]
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.106085 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.137562 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"]
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.140626 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.140873 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8jtm\" (UniqueName: \"kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.140929 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25scc\" (UniqueName: \"kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.140954 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.140972 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.141317 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.641300573 +0000 UTC m=+143.498487282 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.141352 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.141418 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.141440 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.141677 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.641670703 +0000 UTC m=+143.498857412 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.142510 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.142720 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.158491 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vvjgp"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.193009 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8jtm\" (UniqueName: \"kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm\") pod \"community-operators-5b2cz\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.216420 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:44 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:44 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:44 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.216472 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.216901 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" event={"ID":"b596a0e4-953f-462f-a68d-58cdfdb6294b","Type":"ContainerStarted","Data":"7ade41a2ee1c2b37e2b967d212691b45ed3f228af3fc1312e68b5132666eefec"}
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.231881 4797 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.243071 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" event={"ID":"6ed91194-b05f-42a5-a8cd-6a94299b2b01","Type":"ContainerStarted","Data":"7ee489f2910bbcdf1976b3423fc8ad40cff280e6bda798f195bfb3da65df78df"}
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.243115 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" event={"ID":"6ed91194-b05f-42a5-a8cd-6a94299b2b01","Type":"ContainerStarted","Data":"978081a88a439f5b14f2ea5ee4b5d97e049a22095871be27223ad1c68648f65f"}
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.246547 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.246836 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25scc\" (UniqueName: \"kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.246873 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.246954 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.247473 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.247554 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.747535693 +0000 UTC m=+143.604722402 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.248340 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.251755 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" event={"ID":"d7c0be23-efaf-490e-a685-d3cf605dd7ac","Type":"ContainerStarted","Data":"e34c87acc42538338d9f8b3b6b31fe4b8eab26e255f67bfe342b53d470ea2dac"}
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.263672 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5b2cz"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.283685 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25scc\" (UniqueName: \"kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc\") pod \"certified-operators-bz6gm\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.291971 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-gzw7t" podStartSLOduration=123.291942845 podStartE2EDuration="2m3.291942845s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.261347055 +0000 UTC m=+143.118533764" watchObservedRunningTime="2026-01-04 11:50:44.291942845 +0000 UTC m=+143.149129554"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.293181 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z6g55" podStartSLOduration=123.293177058 podStartE2EDuration="2m3.293177058s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.287459998 +0000 UTC m=+143.144646707" watchObservedRunningTime="2026-01-04 11:50:44.293177058 +0000 UTC m=+143.150363767"
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.304534 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" event={"ID":"7a889bd9-728a-4eb5-bc26-76dfa255ae2d","Type":"ContainerStarted","Data":"fdf04eda58e5ca7caca7f7f92c9cdb7c8f8c8c8f3779ce9050c6229eca29be23"}
Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.304585 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh"
event={"ID":"7a889bd9-728a-4eb5-bc26-76dfa255ae2d","Type":"ContainerStarted","Data":"62ac887aef57accacca1f550badb83dcab08b0344642a93bfaddaaedeae7b7bc"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.321872 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" event={"ID":"5993e116-1e4f-47ba-a301-47a026bdbf14","Type":"ContainerStarted","Data":"03e323b756b695f27416c0f70eb92c3ef368ecc0d2d856fdc64990a569e754be"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.337645 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d2pjn" event={"ID":"ac79c571-f4ba-4eee-850e-7c3ca6465535","Type":"ContainerStarted","Data":"d0eb302908155b0404658a9d102a6166ebdf25c826f41890da39d582f13fcb77"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.353775 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.355030 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.855019426 +0000 UTC m=+143.712206135 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.361455 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66qs6" podStartSLOduration=123.361438514 podStartE2EDuration="2m3.361438514s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.326937991 +0000 UTC m=+143.184124700" watchObservedRunningTime="2026-01-04 11:50:44.361438514 +0000 UTC m=+143.218625223" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.396243 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87bml" event={"ID":"929fa979-e34b-4512-baff-7d6a4ab601ce","Type":"ContainerStarted","Data":"86b0061c0e33b80697d1d8b42f4a16fd006a78f979cf88e2a9bb07c3b3aed778"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.396288 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87bml" event={"ID":"929fa979-e34b-4512-baff-7d6a4ab601ce","Type":"ContainerStarted","Data":"5e34edc21ec07063456153ea88fa2ede46b540e10a6796a6bb07b8819e142505"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.429242 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bsz5f" event={"ID":"bf9d4794-e0c1-4200-912a-57e7c34d7250","Type":"ContainerStarted","Data":"a73a4141ba54e7e7bb02725b82d6e123157ec980219cfbbf92062e9276953475"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.438585 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-nl9mh" podStartSLOduration=123.438570292 podStartE2EDuration="2m3.438570292s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.363448527 +0000 UTC m=+143.220635236" watchObservedRunningTime="2026-01-04 11:50:44.438570292 +0000 UTC m=+143.295756991" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.440237 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lrkgj"] Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.456623 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.457557 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hmdpk" event={"ID":"e639d5c0-ac9a-4af9-81a0-a4932eb5e197","Type":"ContainerStarted","Data":"4687bb8ec76036d9badfa84ebf0b6f8264de72fde44b3adb8656717901441d26"} Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.457910 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:44.957862507 +0000 UTC m=+143.815049276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.484876 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" event={"ID":"0101a1a3-cd94-49dc-8a5b-9876927cdfab","Type":"ContainerStarted","Data":"fcafd77b610f052328617d10b234c91334cdfb18736d78edee18d7a3218adea5"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.484942 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" event={"ID":"0101a1a3-cd94-49dc-8a5b-9876927cdfab","Type":"ContainerStarted","Data":"50393170c1668f8e5e3742b79f4d122396c20d4be84dc04aea6ebab36ab90bd6"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.485464 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bz6gm" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.491228 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" event={"ID":"346a0026-3d33-4d35-9a7d-a622071ce541","Type":"ContainerStarted","Data":"ca020db5453e633805e1e20e1c120407792043248c304af1a641e269e38adebd"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.491907 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.513592 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" event={"ID":"c540aa4c-7310-40c0-b929-11b5d21e59fb","Type":"ContainerStarted","Data":"e3a59e455dbf579bbf9bb1f12dbc88dc4a20f32941a72021637ac12847ebb7a1"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.515917 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-nk574" podStartSLOduration=123.515907446 podStartE2EDuration="2m3.515907446s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.514610832 +0000 UTC m=+143.371797541" watchObservedRunningTime="2026-01-04 11:50:44.515907446 +0000 UTC m=+143.373094155" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.544683 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5" podStartSLOduration=123.544666809 podStartE2EDuration="2m3.544666809s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.542718098 +0000 UTC m=+143.399904807" watchObservedRunningTime="2026-01-04 11:50:44.544666809 +0000 UTC m=+143.401853518" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.573893 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-25vnt" event={"ID":"3e5aef0e-7435-41ec-911a-64f5e9b1ff58","Type":"ContainerStarted","Data":"160e682f874048e7db6e6a3963f9e09385962658ad384615fdc459959d8898e9"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.573936 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-25vnt" event={"ID":"3e5aef0e-7435-41ec-911a-64f5e9b1ff58","Type":"ContainerStarted","Data":"53b23e67f3edcdaf4b3a2e96d321afc1bfef4718cad291ac88786c1059f0b2ac"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.574578 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-25vnt" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.575171 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.576721 4797 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:45.076708877 +0000 UTC m=+143.933895586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6whj" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.596188 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" event={"ID":"107f145c-ad17-499f-ae0b-55c34e0d04e6","Type":"ContainerStarted","Data":"914a93af12448e224a1092581b5fe75d270765800fd1bdb972a47439f83c4496"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.596397 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" event={"ID":"107f145c-ad17-499f-ae0b-55c34e0d04e6","Type":"ContainerStarted","Data":"23741817c21da705113a5286fb105918200c183a2de820c5cba245d3bf588092"} Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.613170 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc" podStartSLOduration=123.613147671 podStartE2EDuration="2m3.613147671s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.578249328 +0000 UTC m=+143.435436037" watchObservedRunningTime="2026-01-04 11:50:44.613147671 +0000 UTC m=+143.470334380" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.614227 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"] Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.615693 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-25vnt" podStartSLOduration=8.615683437 podStartE2EDuration="8.615683437s" podCreationTimestamp="2026-01-04 11:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.605567213 +0000 UTC m=+143.462753912" watchObservedRunningTime="2026-01-04 11:50:44.615683437 +0000 UTC m=+143.472870146" Jan 04 11:50:44 crc kubenswrapper[4797]: W0104 11:50:44.625002 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07ab4232_cce0_4cc1_8d46_6fe5d9c78ca5.slice/crio-9f9e5967fda4d11f0eacc98ae13680d098a8ee8c845a6f00fe3e9a057f1eb855 WatchSource:0}: Error finding container 9f9e5967fda4d11f0eacc98ae13680d098a8ee8c845a6f00fe3e9a057f1eb855: Status 404 returned error can't find the container with id 9f9e5967fda4d11f0eacc98ae13680d098a8ee8c845a6f00fe3e9a057f1eb855 Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.640679 4797 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-04T11:50:44.231918474Z","Handler":null,"Name":""} Jan 04 11:50:44 crc 
kubenswrapper[4797]: I0104 11:50:44.641483 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8" podStartSLOduration=123.641463062 podStartE2EDuration="2m3.641463062s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:44.637980691 +0000 UTC m=+143.495167420" watchObservedRunningTime="2026-01-04 11:50:44.641463062 +0000 UTC m=+143.498649771" Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.676021 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:44 crc kubenswrapper[4797]: E0104 11:50:44.681250 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:45.181219042 +0000 UTC m=+144.038405771 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.703047 4797 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.703273 4797 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.819032 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5b2cz"] Jan 04 11:50:44 crc kubenswrapper[4797]: I0104 11:50:44.823059 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.199565 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:45 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld Jan 04 11:50:45 crc kubenswrapper[4797]: [+]process-running ok Jan 04 11:50:45 crc kubenswrapper[4797]: healthz check failed Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.199612 4797 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.331886 4797 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.332013 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.407430 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"] Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.415361 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6whj\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.421500 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlg2p" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.450474 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:45 crc kubenswrapper[4797]: W0104 11:50:45.480656 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9263a31b_4c9c_4afb_8ff8_d4b5d1da489b.slice/crio-167566ca4939bf50dcfa1576aaddfaecc581cc46a3b973b7d6427aaae9e9e6c8 WatchSource:0}: Error finding container 167566ca4939bf50dcfa1576aaddfaecc581cc46a3b973b7d6427aaae9e9e6c8: Status 404 returned error can't find the container with id 167566ca4939bf50dcfa1576aaddfaecc581cc46a3b973b7d6427aaae9e9e6c8 Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.597681 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.627412 4797 generic.go:334] "Generic (PLEG): container finished" podID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerID="188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe" exitCode=0 Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.627450 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerDied","Data":"188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.627509 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerStarted","Data":"9f9e5967fda4d11f0eacc98ae13680d098a8ee8c845a6f00fe3e9a057f1eb855"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.629764 4797 generic.go:334] "Generic (PLEG): container finished" podID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerID="e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a" exitCode=0 Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.630349 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerDied","Data":"e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.630373 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerStarted","Data":"a6fc021a92a99fb3b550c1452543e06b0d434478d7dcba32c509d4a6832caa7c"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.631754 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerStarted","Data":"167566ca4939bf50dcfa1576aaddfaecc581cc46a3b973b7d6427aaae9e9e6c8"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.633758 4797 generic.go:334] "Generic (PLEG): container finished" podID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerID="a6002a14f1c848d275b8dfd66747e90a90a9de19da9e471e1dffad790e113097" exitCode=0 Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.633799 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerDied","Data":"a6002a14f1c848d275b8dfd66747e90a90a9de19da9e471e1dffad790e113097"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.633815 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerStarted","Data":"099da130895470cdea423958ed559fd4d3d05e6cfea2918425de5c718c1273ba"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.639554 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87bml" event={"ID":"929fa979-e34b-4512-baff-7d6a4ab601ce","Type":"ContainerStarted","Data":"f7f4beac3ddbe20a6b002193a7b9f5113776d693d4bc20c21a114177524baa21"} Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.646145 4797 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.670039 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.885086 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"] Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.907189 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.916105 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 04 11:50:45 crc kubenswrapper[4797]: I0104 11:50:45.918757 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.061675 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.061978 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.062074 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j44zz\" (UniqueName: \"kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.163116 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.163165 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.163234 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j44zz\" (UniqueName: \"kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.164183 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.164405 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.186797 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j44zz\" (UniqueName: \"kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz\") pod \"redhat-marketplace-m2j8n\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") " pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.207271 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:46 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld Jan 04 11:50:46 crc kubenswrapper[4797]: [+]process-running ok Jan 04 11:50:46 crc kubenswrapper[4797]: healthz check failed Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.207338 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.249923 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.284329 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.285614 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.300308 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.355797 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.366918 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pphcd\" (UniqueName: \"kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.367129 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.367152 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.468129 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pphcd\" (UniqueName: \"kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.468444 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.468463 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.469066 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.469264 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content\") pod \"redhat-marketplace-zpvl4\" (UID: 
\"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.513081 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pphcd\" (UniqueName: \"kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd\") pod \"redhat-marketplace-zpvl4\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.608844 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.613119 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.645594 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" event={"ID":"8ab0052f-8002-48bd-882a-b304ed2b2d91","Type":"ContainerStarted","Data":"8bc5e0eafd0dbf7605057f3fc25f1150265558c84d01317a45d0d969de014b48"} Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.645637 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" event={"ID":"8ab0052f-8002-48bd-882a-b304ed2b2d91","Type":"ContainerStarted","Data":"4e737fc6839c870eaba0033c876d0ad5a88cad9abb0d127755172d29cb82ad35"} Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.646234 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.649582 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerStarted","Data":"40550f23c70f29925f7769ad6f70b4d63b1d68374dc3c74d862bbdfefbc01ff8"} Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.651922 4797 generic.go:334] "Generic (PLEG): container finished" podID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerID="d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac" exitCode=0 Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.652007 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerDied","Data":"d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac"} Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.656975 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87bml" event={"ID":"929fa979-e34b-4512-baff-7d6a4ab601ce","Type":"ContainerStarted","Data":"7dcfe2aeb33ddf219d755a3b75fc6e1c450d601996dce5d7b30136e836e74232"} Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.681192 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" podStartSLOduration=125.68117482 podStartE2EDuration="2m5.68117482s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:46.664031501 +0000 UTC m=+145.521218230" watchObservedRunningTime="2026-01-04 11:50:46.68117482 +0000 UTC m=+145.538361529" Jan 04 11:50:46 crc 
kubenswrapper[4797]: I0104 11:50:46.683083 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-smn67"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.683950 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.685282 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.705904 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smn67"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.730864 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-87bml" podStartSLOduration=10.73084275 podStartE2EDuration="10.73084275s" podCreationTimestamp="2026-01-04 11:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:46.729545256 +0000 UTC m=+145.586731975" watchObservedRunningTime="2026-01-04 11:50:46.73084275 +0000 UTC m=+145.588029459" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.876176 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.876324 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.876344 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2b59\" (UniqueName: \"kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.880231 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.881342 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.893556 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"] Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.977833 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.977880 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnplt\" (UniqueName: \"kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.977901 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.978088 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.978142 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2b59\" (UniqueName: \"kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.978218 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.978567 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:46 crc kubenswrapper[4797]: I0104 11:50:46.978610 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.007820 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h2b59\" (UniqueName: \"kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59\") pod \"redhat-operators-smn67\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") " pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.027037 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.035556 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:50:47 crc kubenswrapper[4797]: W0104 11:50:47.047778 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99d3aa96_c341_4adc_ae7f_93d067347315.slice/crio-aba8a513998fa5f924a944b3f2cc1a18d2b5c69fe2042b27ea5573a7a4fc1b80 WatchSource:0}: Error finding container aba8a513998fa5f924a944b3f2cc1a18d2b5c69fe2042b27ea5573a7a4fc1b80: Status 404 returned error can't find the container with id aba8a513998fa5f924a944b3f2cc1a18d2b5c69fe2042b27ea5573a7a4fc1b80 Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.079162 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.079969 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnplt\" (UniqueName: \"kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.080291 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.079918 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.080804 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.112538 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnplt\" (UniqueName: \"kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt\") pod \"redhat-operators-ck5tb\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.198864 4797 
patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:47 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:47 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:47 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.199218 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.243241 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck5tb"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.286056 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smn67"]
Jan 04 11:50:47 crc kubenswrapper[4797]: W0104 11:50:47.311525 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf97f7bbd_7702_4344_b235_056f577f6b55.slice/crio-25397d8a3d0e7935f3b94173aeb0d1f5601d85ee8fcaf18a5413329c347ec3de WatchSource:0}: Error finding container 25397d8a3d0e7935f3b94173aeb0d1f5601d85ee8fcaf18a5413329c347ec3de: Status 404 returned error can't find the container with id 25397d8a3d0e7935f3b94173aeb0d1f5601d85ee8fcaf18a5413329c347ec3de
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.343496 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.344253 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.347861 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.348458 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.349484 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.458278 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"]
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.484590 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.488629 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.488665 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.488724 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.488772 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.489547 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.493380 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.590278 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.590345 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.590379 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.590404 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.590746 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.594052 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.601212 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.605251 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.608702 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.663100 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.667053 4797 generic.go:334] "Generic (PLEG): container finished" podID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerID="33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e" exitCode=0
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.667204 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerDied","Data":"33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.790530 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.796623 4797 generic.go:334] "Generic (PLEG): container finished" podID="99d3aa96-c341-4adc-ae7f-93d067347315" containerID="f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2" exitCode=0
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.796664 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerDied","Data":"f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.796704 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerStarted","Data":"aba8a513998fa5f924a944b3f2cc1a18d2b5c69fe2042b27ea5573a7a4fc1b80"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.798110 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.805583 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerStarted","Data":"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.805648 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerStarted","Data":"760651836a3667be4927f7510b29c49d2675976991970654277084603ef98fd2"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.813680 4797 generic.go:334] "Generic (PLEG): container finished" podID="f97f7bbd-7702-4344-b235-056f577f6b55" containerID="ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9" exitCode=0
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.813848 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerDied","Data":"ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.813892 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerStarted","Data":"25397d8a3d0e7935f3b94173aeb0d1f5601d85ee8fcaf18a5413329c347ec3de"}
Jan 04 11:50:47 crc kubenswrapper[4797]: I0104 11:50:47.851169 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 04 11:50:48 crc kubenswrapper[4797]: W0104 11:50:48.083946 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-af4103f088832e5fe46ea0808c8bd3c8ebe4a18c09558aa86c5f9198cc9c1b79 WatchSource:0}: Error finding container af4103f088832e5fe46ea0808c8bd3c8ebe4a18c09558aa86c5f9198cc9c1b79: Status 404 returned error can't find the container with id af4103f088832e5fe46ea0808c8bd3c8ebe4a18c09558aa86c5f9198cc9c1b79
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.105101 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.105561 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.112695 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.112996 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.113702 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.121250 4797 patch_prober.go:28] interesting pod/console-f9d7485db-wftnf container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.121286 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wftnf" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" containerName="console" probeResult="failure" output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.151488 4797 patch_prober.go:28] interesting pod/downloads-7954f5f757-vg9x9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.151962 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vg9x9" podUID="bac1cac5-6f43-495f-9a7b-b5a1e13d5898" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.151596 4797 patch_prober.go:28] interesting pod/downloads-7954f5f757-vg9x9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.152386 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vg9x9" podUID="bac1cac5-6f43-495f-9a7b-b5a1e13d5898" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.199498 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:48 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:48 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:48 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.199572 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:48 crc kubenswrapper[4797]: W0104 11:50:48.202015 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-f8aa4651cbe307dff2b2976e8361afd97fbd604392a02f816c820c6702c26840 WatchSource:0}: Error finding container f8aa4651cbe307dff2b2976e8361afd97fbd604392a02f816c820c6702c26840: Status 404 returned error can't find the container with id f8aa4651cbe307dff2b2976e8361afd97fbd604392a02f816c820c6702c26840
Jan 04 11:50:48 crc kubenswrapper[4797]: W0104 11:50:48.345042 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-df14f1f713747f88f24078d524b84853d9d95abd609a7c13b92e886677c3639f WatchSource:0}: Error finding container df14f1f713747f88f24078d524b84853d9d95abd609a7c13b92e886677c3639f: Status 404 returned error can't find the container with id df14f1f713747f88f24078d524b84853d9d95abd609a7c13b92e886677c3639f
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.368197 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.368258 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.375038 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.823485 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"df14f1f713747f88f24078d524b84853d9d95abd609a7c13b92e886677c3639f"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.824970 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"50cbf7f5fc31a00f0989f1dd493a8a6fa6b5f1ee29c6c495127f68c74ae789c1"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.825035 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f8aa4651cbe307dff2b2976e8361afd97fbd604392a02f816c820c6702c26840"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.827097 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d500e2f94cac3e38a1e566ead358cc10a3ad7029ac50135f818e3e58f3e3a565"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.827128 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"af4103f088832e5fe46ea0808c8bd3c8ebe4a18c09558aa86c5f9198cc9c1b79"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.836062 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fea3f61e-b708-4038-94ba-b3b0fdc28c27","Type":"ContainerStarted","Data":"8704de5ee3ffdbcbcccec2cd2d5031d38e95794ad2e1efdb9dff706d44713c19"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.836101 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fea3f61e-b708-4038-94ba-b3b0fdc28c27","Type":"ContainerStarted","Data":"ca43bb88a83ce1515139bfb343a154a55b5205bef015be4eb8e9c934b2691752"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.838130 4797 generic.go:334] "Generic (PLEG): container finished" podID="5993e116-1e4f-47ba-a301-47a026bdbf14" containerID="03e323b756b695f27416c0f70eb92c3ef368ecc0d2d856fdc64990a569e754be" exitCode=0
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.838172 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" event={"ID":"5993e116-1e4f-47ba-a301-47a026bdbf14","Type":"ContainerDied","Data":"03e323b756b695f27416c0f70eb92c3ef368ecc0d2d856fdc64990a569e754be"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.847280 4797 generic.go:334] "Generic (PLEG): container finished" podID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerID="63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae" exitCode=0
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.847584 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerDied","Data":"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae"}
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.854873 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-9kvs8"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.856243 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.856227451 podStartE2EDuration="1.856227451s" podCreationTimestamp="2026-01-04 11:50:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:48.854446944 +0000 UTC m=+147.711633653" watchObservedRunningTime="2026-01-04 11:50:48.856227451 +0000 UTC m=+147.713414160"
Jan 04 11:50:48 crc kubenswrapper[4797]: I0104 11:50:48.856597 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fr4c5"
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.196512 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.200811 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:49 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:49 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:49 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.200854 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.492924 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.493285 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.866031 4797 generic.go:334] "Generic (PLEG): container finished" podID="fea3f61e-b708-4038-94ba-b3b0fdc28c27" containerID="8704de5ee3ffdbcbcccec2cd2d5031d38e95794ad2e1efdb9dff706d44713c19" exitCode=0
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.866086 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fea3f61e-b708-4038-94ba-b3b0fdc28c27","Type":"ContainerDied","Data":"8704de5ee3ffdbcbcccec2cd2d5031d38e95794ad2e1efdb9dff706d44713c19"}
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.871424 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3a50467f484be47e25b65636abb2e5bffddbbd155aaacc3d186fc0d906375a1d"}
Jan 04 11:50:49 crc kubenswrapper[4797]: I0104 11:50:49.871455 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.199173 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:50 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:50 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:50 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.199223 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.229198 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.366108 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nklsc\" (UniqueName: \"kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc\") pod \"5993e116-1e4f-47ba-a301-47a026bdbf14\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") "
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.366548 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume\") pod \"5993e116-1e4f-47ba-a301-47a026bdbf14\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") "
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.366587 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume\") pod \"5993e116-1e4f-47ba-a301-47a026bdbf14\" (UID: \"5993e116-1e4f-47ba-a301-47a026bdbf14\") "
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.376822 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume" (OuterVolumeSpecName: "config-volume") pod "5993e116-1e4f-47ba-a301-47a026bdbf14" (UID: "5993e116-1e4f-47ba-a301-47a026bdbf14"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.386376 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5993e116-1e4f-47ba-a301-47a026bdbf14" (UID: "5993e116-1e4f-47ba-a301-47a026bdbf14"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.386797 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc" (OuterVolumeSpecName: "kube-api-access-nklsc") pod "5993e116-1e4f-47ba-a301-47a026bdbf14" (UID: "5993e116-1e4f-47ba-a301-47a026bdbf14"). InnerVolumeSpecName "kube-api-access-nklsc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.468632 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5993e116-1e4f-47ba-a301-47a026bdbf14-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.468669 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5993e116-1e4f-47ba-a301-47a026bdbf14-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.468679 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nklsc\" (UniqueName: \"kubernetes.io/projected/5993e116-1e4f-47ba-a301-47a026bdbf14-kube-api-access-nklsc\") on node \"crc\" DevicePath \"\""
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.889012 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.889723 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb" event={"ID":"5993e116-1e4f-47ba-a301-47a026bdbf14","Type":"ContainerDied","Data":"73aa9dc1c3659de68a540ffd50286ba57c684af80d97f6ed479da7a0d852167c"}
Jan 04 11:50:50 crc kubenswrapper[4797]: I0104 11:50:50.889750 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73aa9dc1c3659de68a540ffd50286ba57c684af80d97f6ed479da7a0d852167c"
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.206941 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:51 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:51 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:51 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.207366 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.382787 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.459769 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access\") pod \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") "
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.459926 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir\") pod \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\" (UID: \"fea3f61e-b708-4038-94ba-b3b0fdc28c27\") "
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.460061 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fea3f61e-b708-4038-94ba-b3b0fdc28c27" (UID: "fea3f61e-b708-4038-94ba-b3b0fdc28c27"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.460464 4797 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.481900 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fea3f61e-b708-4038-94ba-b3b0fdc28c27" (UID: "fea3f61e-b708-4038-94ba-b3b0fdc28c27"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.564856 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fea3f61e-b708-4038-94ba-b3b0fdc28c27-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.926335 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fea3f61e-b708-4038-94ba-b3b0fdc28c27","Type":"ContainerDied","Data":"ca43bb88a83ce1515139bfb343a154a55b5205bef015be4eb8e9c934b2691752"}
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.926373 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca43bb88a83ce1515139bfb343a154a55b5205bef015be4eb8e9c934b2691752"
Jan 04 11:50:51 crc kubenswrapper[4797]: I0104 11:50:51.926430 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.199458 4797 patch_prober.go:28] interesting pod/router-default-5444994796-zdnhw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 04 11:50:52 crc kubenswrapper[4797]: [-]has-synced failed: reason withheld
Jan 04 11:50:52 crc kubenswrapper[4797]: [+]process-running ok
Jan 04 11:50:52 crc kubenswrapper[4797]: healthz check failed
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.199524 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdnhw" podUID="2ecce101-e9fe-4612-8306-94165bb43460" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.837961 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 04 11:50:52 crc kubenswrapper[4797]: E0104 11:50:52.843848 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5993e116-1e4f-47ba-a301-47a026bdbf14" containerName="collect-profiles"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.843871 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5993e116-1e4f-47ba-a301-47a026bdbf14" containerName="collect-profiles"
Jan 04 11:50:52 crc kubenswrapper[4797]: E0104 11:50:52.843891 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fea3f61e-b708-4038-94ba-b3b0fdc28c27" containerName="pruner"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.843897 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fea3f61e-b708-4038-94ba-b3b0fdc28c27" containerName="pruner"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.844035 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fea3f61e-b708-4038-94ba-b3b0fdc28c27" containerName="pruner"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.844048 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5993e116-1e4f-47ba-a301-47a026bdbf14" containerName="collect-profiles"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.844348 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.844422 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.847225 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.849481 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.906690 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:52 crc kubenswrapper[4797]: I0104 11:50:52.906760 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.007644 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.007693 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.007786 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.028356 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.172958 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.198698 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.201066 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zdnhw"
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.825171 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 04 11:50:53 crc kubenswrapper[4797]: I0104 11:50:53.954941 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6d817c86-c13e-4f1f-9abd-674437ab06df","Type":"ContainerStarted","Data":"e0f52fd8e735dbe6d2d1b85da59bce7437d699bdf47aa2ac4b1906c570291f57"}
Jan 04 11:50:54 crc kubenswrapper[4797]: I0104 11:50:54.047766 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-25vnt"
Jan 04 11:50:56 crc kubenswrapper[4797]: I0104 11:50:56.014839 4797 generic.go:334] "Generic (PLEG): container finished" podID="6d817c86-c13e-4f1f-9abd-674437ab06df" containerID="752b0cdfb350469f36b2c3635d7665924f99e814ab2af96669ffb7285ceea5d8" exitCode=0
Jan 04 11:50:56 crc kubenswrapper[4797]: I0104 11:50:56.015009 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6d817c86-c13e-4f1f-9abd-674437ab06df","Type":"ContainerDied","Data":"752b0cdfb350469f36b2c3635d7665924f99e814ab2af96669ffb7285ceea5d8"}
Jan 04 11:50:56 crc kubenswrapper[4797]: I0104 11:50:56.910397 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv"
Jan 04 11:50:58 crc kubenswrapper[4797]: I0104 11:50:58.116431 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:58 crc kubenswrapper[4797]: I0104 11:50:58.121653 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wftnf"
Jan 04 11:50:58 crc kubenswrapper[4797]: I0104 11:50:58.170086 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-vg9x9"
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.276441 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.395885 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir\") pod \"6d817c86-c13e-4f1f-9abd-674437ab06df\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") "
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.395981 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access\") pod \"6d817c86-c13e-4f1f-9abd-674437ab06df\" (UID: \"6d817c86-c13e-4f1f-9abd-674437ab06df\") "
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.395980 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6d817c86-c13e-4f1f-9abd-674437ab06df" (UID: "6d817c86-c13e-4f1f-9abd-674437ab06df"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.396359 4797 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d817c86-c13e-4f1f-9abd-674437ab06df-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.408210 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6d817c86-c13e-4f1f-9abd-674437ab06df" (UID: "6d817c86-c13e-4f1f-9abd-674437ab06df"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.499161 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d817c86-c13e-4f1f-9abd-674437ab06df-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.560038 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"]
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.560247 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" containerID="cri-o://40a60f6ec432f29e01dc2c711f48cff255829dfaff598c3f0cfd7b361ba9a400" gracePeriod=30
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.597022 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"]
Jan 04 11:51:02 crc kubenswrapper[4797]: I0104 11:51:02.597223 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" containerID="cri-o://5639c35657bb1691897ef570575f46e5d2aef7465661320e9942e9edd9522b76" gracePeriod=30
Jan 04 11:51:03 crc kubenswrapper[4797]: I0104 11:51:03.057890 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6d817c86-c13e-4f1f-9abd-674437ab06df","Type":"ContainerDied","Data":"e0f52fd8e735dbe6d2d1b85da59bce7437d699bdf47aa2ac4b1906c570291f57"}
Jan 04 11:51:03 crc kubenswrapper[4797]: I0104 11:51:03.058443 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0f52fd8e735dbe6d2d1b85da59bce7437d699bdf47aa2ac4b1906c570291f57"
Jan 04 11:51:03 crc kubenswrapper[4797]: I0104 11:51:03.058215 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 04 11:51:03 crc kubenswrapper[4797]: I0104 11:51:03.920411 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:51:03 crc kubenswrapper[4797]: I0104 11:51:03.933522 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c264f05-2fcc-422e-a717-d766b27bfd5b-metrics-certs\") pod \"network-metrics-daemon-v8lzg\" (UID: \"4c264f05-2fcc-422e-a717-d766b27bfd5b\") " pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:51:04 crc kubenswrapper[4797]: I0104 11:51:04.094487 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-v8lzg"
Jan 04 11:51:05 crc kubenswrapper[4797]: I0104 11:51:05.073375 4797 generic.go:334] "Generic (PLEG): container finished" podID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerID="40a60f6ec432f29e01dc2c711f48cff255829dfaff598c3f0cfd7b361ba9a400" exitCode=0
Jan 04 11:51:05 crc kubenswrapper[4797]: I0104 11:51:05.073725 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" event={"ID":"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39","Type":"ContainerDied","Data":"40a60f6ec432f29e01dc2c711f48cff255829dfaff598c3f0cfd7b361ba9a400"}
Jan 04 11:51:05 crc kubenswrapper[4797]: I0104 11:51:05.681772 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:51:06 crc kubenswrapper[4797]: I0104 11:51:06.082138 4797 generic.go:334] "Generic (PLEG): container finished" podID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerID="5639c35657bb1691897ef570575f46e5d2aef7465661320e9942e9edd9522b76" exitCode=0
Jan 04 11:51:06 crc kubenswrapper[4797]: I0104 11:51:06.082192 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" event={"ID":"81081cdd-1c50-48d9-b176-832427d3ce5f","Type":"ContainerDied","Data":"5639c35657bb1691897ef570575f46e5d2aef7465661320e9942e9edd9522b76"}
Jan 04 11:51:07 crc kubenswrapper[4797]: I0104 11:51:07.988334 4797 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wt6hc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body=
Jan 04 11:51:07 crc kubenswrapper[4797]: I0104 11:51:07.988404 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused"
Jan 04 11:51:08 crc kubenswrapper[4797]: I0104 11:51:08.245958 4797 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6vf5r container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Jan 04 11:51:08 crc kubenswrapper[4797]: I0104 11:51:08.246047 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Jan 04 11:51:18 crc kubenswrapper[4797]: I0104 11:51:18.934967 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-64twc"
Jan 04 11:51:18 crc kubenswrapper[4797]: I0104 11:51:18.988564 4797 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wt6hc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:51:18 crc kubenswrapper[4797]: I0104 11:51:18.988969 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:51:19 crc kubenswrapper[4797]: I0104 11:51:19.246363 4797 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6vf5r container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": context deadline exceeded" start-of-body=
Jan 04 11:51:19 crc kubenswrapper[4797]: I0104 11:51:19.246704 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": context deadline exceeded"
Jan 04 11:51:19 crc kubenswrapper[4797]: I0104 11:51:19.492764 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:51:19 crc kubenswrapper[4797]: I0104 11:51:19.493070 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:51:24 crc kubenswrapper[4797]: E0104 11:51:24.374647 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Jan 04 11:51:24 crc kubenswrapper[4797]: E0104 11:51:24.375151 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mnplt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-ck5tb_openshift-marketplace(c698bb8a-7f5e-40c4-b757-685b34fbe709): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 04 11:51:24 crc kubenswrapper[4797]: E0104 11:51:24.376382 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-ck5tb" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709"
Jan 04 11:51:26 crc kubenswrapper[4797]: E0104 11:51:26.374436 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-ck5tb" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709"
Jan 04 11:51:26 crc kubenswrapper[4797]: E0104 11:51:26.888172 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Jan 04 11:51:26 crc kubenswrapper[4797]: E0104 11:51:26.888317 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r8jtm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-5b2cz_openshift-marketplace(0ccf58b6-d6dc-4830-b997-0ab63ee5e6df): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 04 11:51:26 crc kubenswrapper[4797]: E0104 11:51:26.889552 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-5b2cz" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.031859 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.238113 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 04 11:51:28 crc kubenswrapper[4797]: E0104 11:51:28.238943 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d817c86-c13e-4f1f-9abd-674437ab06df" containerName="pruner"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.239229 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d817c86-c13e-4f1f-9abd-674437ab06df" containerName="pruner"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.239715 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d817c86-c13e-4f1f-9abd-674437ab06df" containerName="pruner"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.241314 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.248257 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.248856 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.261385 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.299772 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.299886 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.402601 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.402734 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.402862 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.433252 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.595209 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.987666 4797 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wt6hc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:51:28 crc kubenswrapper[4797]: I0104 11:51:28.988583 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:51:29 crc kubenswrapper[4797]: I0104 11:51:29.245832 4797 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6vf5r container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:51:29 crc kubenswrapper[4797]: I0104 11:51:29.245912 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.344279 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-5b2cz" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.362504 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.362629 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vnht7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lrkgj_openshift-marketplace(97d1cbe8-6a84-4cc0-a4af-0f66635aa60b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.364188 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lrkgj" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.884543 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.884716 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pphcd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zpvl4_openshift-marketplace(99d3aa96-c341-4adc-ae7f-93d067347315): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:32 crc kubenswrapper[4797]: E0104 11:51:32.885917 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zpvl4" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.635515 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.636696 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.640478 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.770872 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.770936 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.770965 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.872431 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.872476 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.872503 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.872583 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.872658 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir\") pod \"installer-9-crc\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.895958 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"afa53643-f607-45d0-b4f4-807edcd89c74\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:33 crc kubenswrapper[4797]: I0104 11:51:33.970010 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.449534 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-lrkgj" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.449592 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zpvl4" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.551187 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.551534 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-87cfg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2wl2p_openshift-marketplace(07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.552962 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: 
copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2wl2p" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.554875 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.556506 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.556733 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-25scc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bz6gm_openshift-marketplace(9263a31b-4c9c-4afb-8ff8-d4b5d1da489b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.557903 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bz6gm" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.560709 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.580412 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"] Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.580611 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.580623 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.580644 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.580650 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.580737 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" containerName="route-controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.580754 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" containerName="controller-manager" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.581201 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.593909 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config\") pod \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594166 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca\") pod \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594188 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tw8f\" (UniqueName: \"kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f\") pod \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594208 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert\") pod \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594222 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca\") pod \"81081cdd-1c50-48d9-b176-832427d3ce5f\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594247 4797 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config\") pod \"81081cdd-1c50-48d9-b176-832427d3ce5f\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594264 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk27w\" (UniqueName: \"kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w\") pod \"81081cdd-1c50-48d9-b176-832427d3ce5f\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594306 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles\") pod \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\" (UID: \"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594335 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert\") pod \"81081cdd-1c50-48d9-b176-832427d3ce5f\" (UID: \"81081cdd-1c50-48d9-b176-832427d3ce5f\") " Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594402 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594449 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594471 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlcvm\" (UniqueName: \"kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594499 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.594514 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc 
kubenswrapper[4797]: I0104 11:51:35.598505 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" (UID: "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.599713 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca" (OuterVolumeSpecName: "client-ca") pod "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" (UID: "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.600348 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config" (OuterVolumeSpecName: "config") pod "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" (UID: "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.600417 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"] Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.601176 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca" (OuterVolumeSpecName: "client-ca") pod "81081cdd-1c50-48d9-b176-832427d3ce5f" (UID: "81081cdd-1c50-48d9-b176-832427d3ce5f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.601629 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config" (OuterVolumeSpecName: "config") pod "81081cdd-1c50-48d9-b176-832427d3ce5f" (UID: "81081cdd-1c50-48d9-b176-832427d3ce5f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.603500 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w" (OuterVolumeSpecName: "kube-api-access-vk27w") pod "81081cdd-1c50-48d9-b176-832427d3ce5f" (UID: "81081cdd-1c50-48d9-b176-832427d3ce5f"). InnerVolumeSpecName "kube-api-access-vk27w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.611370 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "81081cdd-1c50-48d9-b176-832427d3ce5f" (UID: "81081cdd-1c50-48d9-b176-832427d3ce5f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.613192 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" (UID: "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.614642 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.614787 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j44zz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-m2j8n_openshift-marketplace(03c91ee7-6f6e-4c7c-8501-dd36e81e5421): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.616070 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-m2j8n" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.619399 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f" (OuterVolumeSpecName: "kube-api-access-9tw8f") pod "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" (UID: "e47dfea4-b2ef-4f72-8e66-94ebd69e0b39"). InnerVolumeSpecName "kube-api-access-9tw8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.694844 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.694906 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.694924 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlcvm\" (UniqueName: \"kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.694952 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.694967 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695172 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695186 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk27w\" (UniqueName: \"kubernetes.io/projected/81081cdd-1c50-48d9-b176-832427d3ce5f-kube-api-access-vk27w\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695195 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695204 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81081cdd-1c50-48d9-b176-832427d3ce5f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695212 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695220 4797 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695227 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tw8f\" (UniqueName: \"kubernetes.io/projected/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-kube-api-access-9tw8f\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695235 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.695243 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81081cdd-1c50-48d9-b176-832427d3ce5f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.698595 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.698908 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.700297 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.700898 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.717225 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlcvm\" (UniqueName: \"kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm\") pod \"controller-manager-7547fdc57b-hv6ts\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.729362 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-v8lzg"] Jan 04 11:51:35 crc kubenswrapper[4797]: W0104 11:51:35.738644 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c264f05_2fcc_422e_a717_d766b27bfd5b.slice/crio-e23201e90e86c03835a73cb54601dc6af9eebe283ecaba0cfa2a27fec2c309a3 WatchSource:0}: Error finding 
container e23201e90e86c03835a73cb54601dc6af9eebe283ecaba0cfa2a27fec2c309a3: Status 404 returned error can't find the container with id e23201e90e86c03835a73cb54601dc6af9eebe283ecaba0cfa2a27fec2c309a3 Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.761883 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.762060 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h2b59,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-smn67_openshift-marketplace(f97f7bbd-7702-4344-b235-056f577f6b55): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:35 crc kubenswrapper[4797]: E0104 11:51:35.763402 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-smn67" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.944903 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.987201 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 04 11:51:35 crc kubenswrapper[4797]: I0104 11:51:35.993476 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:36 crc kubenswrapper[4797]: W0104 11:51:36.003601 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode0daf140_d426_4fa0_a17c_8bfe93b120c4.slice/crio-ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3 WatchSource:0}: Error finding container ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3: Status 404 returned error can't find the container with id ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3 Jan 04 11:51:36 crc kubenswrapper[4797]: W0104 11:51:36.004905 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podafa53643_f607_45d0_b4f4_807edcd89c74.slice/crio-5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3 WatchSource:0}: Error finding container 5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3: Status 404 returned error can't find the container with id 5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3 Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.161497 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"] Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.272388 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"afa53643-f607-45d0-b4f4-807edcd89c74","Type":"ContainerStarted","Data":"5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.274077 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" event={"ID":"81081cdd-1c50-48d9-b176-832427d3ce5f","Type":"ContainerDied","Data":"1ab4b93bf5130f594e47e5aa743bfffce7dfc8001cb1c318140a8a3b14a3ca64"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.274123 4797 scope.go:117] "RemoveContainer" containerID="5639c35657bb1691897ef570575f46e5d2aef7465661320e9942e9edd9522b76" Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.274122 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r" Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.275183 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e0daf140-d426-4fa0-a17c-8bfe93b120c4","Type":"ContainerStarted","Data":"ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.276198 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" event={"ID":"d922cdfa-0bf5-4f0e-9bff-9932bce71959","Type":"ContainerStarted","Data":"dc4826a97f93ce0f817d61708ba63305142a4642b43ff5e914e52c92e8f034d6"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.278199 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" event={"ID":"4c264f05-2fcc-422e-a717-d766b27bfd5b","Type":"ContainerStarted","Data":"3fe580175099008283c668ce25e0892a2bca90ec43f2a5f6a3187d7fa66c9f89"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.278226 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" event={"ID":"4c264f05-2fcc-422e-a717-d766b27bfd5b","Type":"ContainerStarted","Data":"e23201e90e86c03835a73cb54601dc6af9eebe283ecaba0cfa2a27fec2c309a3"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.281054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" event={"ID":"e47dfea4-b2ef-4f72-8e66-94ebd69e0b39","Type":"ContainerDied","Data":"173a2beae14788f000b4c3c143fb3c1ad3d272ee17f62b4e1dfd22a3b9913244"} Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.281322 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wt6hc" Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.304119 4797 scope.go:117] "RemoveContainer" containerID="40a60f6ec432f29e01dc2c711f48cff255829dfaff598c3f0cfd7b361ba9a400" Jan 04 11:51:36 crc kubenswrapper[4797]: E0104 11:51:36.304800 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2wl2p" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" Jan 04 11:51:36 crc kubenswrapper[4797]: E0104 11:51:36.304810 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bz6gm" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" Jan 04 11:51:36 crc kubenswrapper[4797]: E0104 11:51:36.304817 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-smn67" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" Jan 04 11:51:36 crc kubenswrapper[4797]: E0104 11:51:36.304825 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-m2j8n" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.351116 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"] Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.353310 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wt6hc"] Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.379790 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"] Jan 04 11:51:36 crc kubenswrapper[4797]: I0104 11:51:36.382540 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6vf5r"] Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.286672 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" event={"ID":"d922cdfa-0bf5-4f0e-9bff-9932bce71959","Type":"ContainerStarted","Data":"531c875e523ee845c04ba43b614c4c770ea99c2bf2b6b2b0e9b358a78347254b"} Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.287204 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.290722 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-v8lzg" event={"ID":"4c264f05-2fcc-422e-a717-d766b27bfd5b","Type":"ContainerStarted","Data":"b31325882a60c691244b60fe9c289ca26b111aa65d448ab2491953e60e945715"} Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.291896 4797 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.292918 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"afa53643-f607-45d0-b4f4-807edcd89c74","Type":"ContainerStarted","Data":"67e9885b7253d638e70b92fc0c91daff61317b0b3a900542debf162cbd5003c6"} Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.295169 4797 generic.go:334] "Generic (PLEG): container finished" podID="e0daf140-d426-4fa0-a17c-8bfe93b120c4" containerID="ebc1a43393fb4705e0a65fb12441cfac3d14a2a0f1ef9578ece7a6cd93e768a1" exitCode=0 Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.295224 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e0daf140-d426-4fa0-a17c-8bfe93b120c4","Type":"ContainerDied","Data":"ebc1a43393fb4705e0a65fb12441cfac3d14a2a0f1ef9578ece7a6cd93e768a1"} Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.308711 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" podStartSLOduration=15.308691824 podStartE2EDuration="15.308691824s" podCreationTimestamp="2026-01-04 11:51:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:37.301482756 +0000 UTC m=+196.158669465" watchObservedRunningTime="2026-01-04 11:51:37.308691824 +0000 UTC m=+196.165878523" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.319220 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-v8lzg" podStartSLOduration=176.319202509 podStartE2EDuration="2m56.319202509s" podCreationTimestamp="2026-01-04 11:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:37.31731321 +0000 UTC m=+196.174499919" watchObservedRunningTime="2026-01-04 11:51:37.319202509 +0000 UTC m=+196.176389218" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.351892 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.351869664 podStartE2EDuration="4.351869664s" podCreationTimestamp="2026-01-04 11:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:37.344650465 +0000 UTC m=+196.201837194" watchObservedRunningTime="2026-01-04 11:51:37.351869664 +0000 UTC m=+196.209056373" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.480602 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81081cdd-1c50-48d9-b176-832427d3ce5f" path="/var/lib/kubelet/pods/81081cdd-1c50-48d9-b176-832427d3ce5f/volumes" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.481353 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e47dfea4-b2ef-4f72-8e66-94ebd69e0b39" path="/var/lib/kubelet/pods/e47dfea4-b2ef-4f72-8e66-94ebd69e0b39/volumes" Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.859313 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"] Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.860547 4797 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.862742 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.863095 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.863233 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.863422 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.863638 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.863769 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:51:37 crc kubenswrapper[4797]: I0104 11:51:37.870354 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"]
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.024958 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.025021 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.025049 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2lkw\" (UniqueName: \"kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.025080 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-serving-cert\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.126484 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.126534 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.126559 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2lkw\" (UniqueName: \"kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.126605 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-serving-cert\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.127505 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.128642 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.134041 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-serving-cert\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.144328 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2lkw\" (UniqueName: \"kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw\") pod \"route-controller-manager-645999796d-kb4wm\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") " pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.184226 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.490980 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.530422 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access\") pod \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") "
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.530477 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir\") pod \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\" (UID: \"e0daf140-d426-4fa0-a17c-8bfe93b120c4\") "
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.530650 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e0daf140-d426-4fa0-a17c-8bfe93b120c4" (UID: "e0daf140-d426-4fa0-a17c-8bfe93b120c4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.533928 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e0daf140-d426-4fa0-a17c-8bfe93b120c4" (UID: "e0daf140-d426-4fa0-a17c-8bfe93b120c4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.567835 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"]
Jan 04 11:51:38 crc kubenswrapper[4797]: W0104 11:51:38.574025 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ed19471_bc8e_422c_a4fb_0d0c9efbb221.slice/crio-4a1c634caf8fc8092c2fad86ee6292006d94ec1e8083eef788df2b5562139b30 WatchSource:0}: Error finding container 4a1c634caf8fc8092c2fad86ee6292006d94ec1e8083eef788df2b5562139b30: Status 404 returned error can't find the container with id 4a1c634caf8fc8092c2fad86ee6292006d94ec1e8083eef788df2b5562139b30
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.631822 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:51:38 crc kubenswrapper[4797]: I0104 11:51:38.631853 4797 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0daf140-d426-4fa0-a17c-8bfe93b120c4-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.310782 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" event={"ID":"7ed19471-bc8e-422c-a4fb-0d0c9efbb221","Type":"ContainerStarted","Data":"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d"}
Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.311116 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.311130 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" event={"ID":"7ed19471-bc8e-422c-a4fb-0d0c9efbb221","Type":"ContainerStarted","Data":"4a1c634caf8fc8092c2fad86ee6292006d94ec1e8083eef788df2b5562139b30"}
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.319413 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e0daf140-d426-4fa0-a17c-8bfe93b120c4","Type":"ContainerDied","Data":"ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3"} Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.319438 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac255b4b03fb783511c98a5cccb3005bb6656a9768de8a1260e4b6ea76ee22d3" Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.320920 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" Jan 04 11:51:39 crc kubenswrapper[4797]: I0104 11:51:39.328025 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" podStartSLOduration=17.328010239 podStartE2EDuration="17.328010239s" podCreationTimestamp="2026-01-04 11:51:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:39.326320384 +0000 UTC m=+198.183507103" watchObservedRunningTime="2026-01-04 11:51:39.328010239 +0000 UTC m=+198.185196948" Jan 04 11:51:41 crc kubenswrapper[4797]: I0104 11:51:41.333790 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerStarted","Data":"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5"} Jan 04 11:51:42 crc kubenswrapper[4797]: I0104 11:51:42.344283 4797 generic.go:334] "Generic (PLEG): container finished" podID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerID="856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5" exitCode=0 Jan 04 11:51:42 crc kubenswrapper[4797]: I0104 11:51:42.344388 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerDied","Data":"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5"} Jan 04 11:51:43 crc kubenswrapper[4797]: I0104 11:51:43.351825 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerStarted","Data":"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec"} Jan 04 11:51:44 crc kubenswrapper[4797]: I0104 11:51:44.386162 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ck5tb" podStartSLOduration=3.449173337 podStartE2EDuration="58.386142228s" podCreationTimestamp="2026-01-04 11:50:46 +0000 UTC" firstStartedPulling="2026-01-04 11:50:47.84587435 +0000 UTC m=+146.703061059" lastFinishedPulling="2026-01-04 11:51:42.782843221 +0000 UTC m=+201.640029950" observedRunningTime="2026-01-04 11:51:44.385761748 +0000 UTC m=+203.242948457" watchObservedRunningTime="2026-01-04 11:51:44.386142228 +0000 UTC m=+203.243328927" Jan 04 11:51:45 crc kubenswrapper[4797]: I0104 11:51:45.367721 4797 generic.go:334] "Generic (PLEG): container finished" podID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerID="84ad83a86cc3eb7d3db96b8df6a85f2713cfa62ce6888c915c69d8ece3687561" exitCode=0 Jan 04 
11:51:45 crc kubenswrapper[4797]: I0104 11:51:45.367813 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerDied","Data":"84ad83a86cc3eb7d3db96b8df6a85f2713cfa62ce6888c915c69d8ece3687561"} Jan 04 11:51:46 crc kubenswrapper[4797]: I0104 11:51:46.375368 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerStarted","Data":"c1e5af264f6b8ec1e6517b4c87cc8ea83972f6a09dedf6b966a4558cc8725af7"} Jan 04 11:51:46 crc kubenswrapper[4797]: I0104 11:51:46.393865 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5b2cz" podStartSLOduration=2.9423734660000003 podStartE2EDuration="1m3.39385054s" podCreationTimestamp="2026-01-04 11:50:43 +0000 UTC" firstStartedPulling="2026-01-04 11:50:45.645786694 +0000 UTC m=+144.502973403" lastFinishedPulling="2026-01-04 11:51:46.097263768 +0000 UTC m=+204.954450477" observedRunningTime="2026-01-04 11:51:46.390583944 +0000 UTC m=+205.247770663" watchObservedRunningTime="2026-01-04 11:51:46.39385054 +0000 UTC m=+205.251037249" Jan 04 11:51:47 crc kubenswrapper[4797]: I0104 11:51:47.244209 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:51:47 crc kubenswrapper[4797]: I0104 11:51:47.244526 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.150691 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pj8fz"] Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.388770 4797 generic.go:334] "Generic (PLEG): container finished" podID="99d3aa96-c341-4adc-ae7f-93d067347315" containerID="72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de" exitCode=0 Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.388866 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerDied","Data":"72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de"} Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.391759 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerStarted","Data":"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"} Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.394632 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerStarted","Data":"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"} Jan 04 11:51:48 crc kubenswrapper[4797]: I0104 11:51:48.441379 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ck5tb" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="registry-server" probeResult="failure" output=< Jan 04 11:51:48 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s Jan 04 11:51:48 crc kubenswrapper[4797]: > Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.401127 4797 generic.go:334] 
"Generic (PLEG): container finished" podID="f97f7bbd-7702-4344-b235-056f577f6b55" containerID="5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837" exitCode=0 Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.401294 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerDied","Data":"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"} Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.403567 4797 generic.go:334] "Generic (PLEG): container finished" podID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerID="2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503" exitCode=0 Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.403584 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerDied","Data":"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"} Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.407605 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerStarted","Data":"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba"} Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.450097 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zpvl4" podStartSLOduration=2.409320335 podStartE2EDuration="1m3.45008248s" podCreationTimestamp="2026-01-04 11:50:46 +0000 UTC" firstStartedPulling="2026-01-04 11:50:47.802359911 +0000 UTC m=+146.659546620" lastFinishedPulling="2026-01-04 11:51:48.843122046 +0000 UTC m=+207.700308765" observedRunningTime="2026-01-04 11:51:49.447253596 +0000 UTC m=+208.304440305" watchObservedRunningTime="2026-01-04 11:51:49.45008248 +0000 UTC m=+208.307269189" Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.493662 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.493716 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.493751 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.494199 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 11:51:49 crc kubenswrapper[4797]: I0104 11:51:49.494320 4797 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7" gracePeriod=600 Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.413858 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerStarted","Data":"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"} Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.415943 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerStarted","Data":"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"} Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.418343 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7" exitCode=0 Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.418370 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7"} Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.418386 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef"} Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.428471 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-smn67" podStartSLOduration=2.322716308 podStartE2EDuration="1m4.428448673s" podCreationTimestamp="2026-01-04 11:50:46 +0000 UTC" firstStartedPulling="2026-01-04 11:50:47.846187378 +0000 UTC m=+146.703374087" lastFinishedPulling="2026-01-04 11:51:49.951919743 +0000 UTC m=+208.809106452" observedRunningTime="2026-01-04 11:51:50.428057163 +0000 UTC m=+209.285243882" watchObservedRunningTime="2026-01-04 11:51:50.428448673 +0000 UTC m=+209.285635382" Jan 04 11:51:50 crc kubenswrapper[4797]: I0104 11:51:50.443570 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lrkgj" podStartSLOduration=3.029508616 podStartE2EDuration="1m7.443554758s" podCreationTimestamp="2026-01-04 11:50:43 +0000 UTC" firstStartedPulling="2026-01-04 11:50:45.645831345 +0000 UTC m=+144.503018054" lastFinishedPulling="2026-01-04 11:51:50.059877477 +0000 UTC m=+208.917064196" observedRunningTime="2026-01-04 11:51:50.442385907 +0000 UTC m=+209.299572616" watchObservedRunningTime="2026-01-04 11:51:50.443554758 +0000 UTC m=+209.300741467" Jan 04 11:51:52 crc kubenswrapper[4797]: I0104 11:51:52.431351 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerStarted","Data":"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"} Jan 04 11:51:52 crc kubenswrapper[4797]: I0104 11:51:52.434355 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerStarted","Data":"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"} Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.448287 4797 generic.go:334] "Generic (PLEG): container finished" podID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerID="bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88" exitCode=0 Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.448756 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerDied","Data":"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"} Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.452220 4797 generic.go:334] "Generic (PLEG): container finished" podID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerID="25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74" exitCode=0 Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.452253 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerDied","Data":"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"} Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.873738 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lrkgj" Jan 04 11:51:53 crc kubenswrapper[4797]: I0104 11:51:53.874118 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lrkgj" Jan 04 11:51:54 crc kubenswrapper[4797]: I0104 11:51:54.134024 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lrkgj" Jan 04 11:51:54 crc kubenswrapper[4797]: I0104 11:51:54.265351 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:54 crc kubenswrapper[4797]: I0104 11:51:54.265414 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:54 crc kubenswrapper[4797]: I0104 11:51:54.300684 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:54 crc kubenswrapper[4797]: I0104 11:51:54.499213 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:55 crc kubenswrapper[4797]: I0104 11:51:55.510948 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lrkgj" Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.469701 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerStarted","Data":"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"} Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.471671 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerStarted","Data":"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"} Jan 04 11:51:56 crc 
kubenswrapper[4797]: I0104 11:51:56.473588 4797 generic.go:334] "Generic (PLEG): container finished" podID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerID="008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870" exitCode=0 Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.473675 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerDied","Data":"008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870"} Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.486900 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2wl2p" podStartSLOduration=3.634010246 podStartE2EDuration="1m13.486882111s" podCreationTimestamp="2026-01-04 11:50:43 +0000 UTC" firstStartedPulling="2026-01-04 11:50:45.646166084 +0000 UTC m=+144.503352793" lastFinishedPulling="2026-01-04 11:51:55.499037949 +0000 UTC m=+214.356224658" observedRunningTime="2026-01-04 11:51:56.485516305 +0000 UTC m=+215.342703014" watchObservedRunningTime="2026-01-04 11:51:56.486882111 +0000 UTC m=+215.344068830" Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.501470 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5b2cz"] Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.501691 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5b2cz" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="registry-server" containerID="cri-o://c1e5af264f6b8ec1e6517b4c87cc8ea83972f6a09dedf6b966a4558cc8725af7" gracePeriod=2 Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.505785 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m2j8n" podStartSLOduration=3.647765115 podStartE2EDuration="1m11.505770335s" podCreationTimestamp="2026-01-04 11:50:45 +0000 UTC" firstStartedPulling="2026-01-04 11:50:47.67392675 +0000 UTC m=+146.531113459" lastFinishedPulling="2026-01-04 11:51:55.53193197 +0000 UTC m=+214.389118679" observedRunningTime="2026-01-04 11:51:56.503577358 +0000 UTC m=+215.360764067" watchObservedRunningTime="2026-01-04 11:51:56.505770335 +0000 UTC m=+215.362957034" Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.614643 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.614684 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:51:56 crc kubenswrapper[4797]: I0104 11:51:56.650943 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.035946 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.036934 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.084472 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 
11:51:57.305436 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.353191 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.482119 4797 generic.go:334] "Generic (PLEG): container finished" podID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerID="c1e5af264f6b8ec1e6517b4c87cc8ea83972f6a09dedf6b966a4558cc8725af7" exitCode=0 Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.482848 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerDied","Data":"c1e5af264f6b8ec1e6517b4c87cc8ea83972f6a09dedf6b966a4558cc8725af7"} Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.527803 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-smn67" Jan 04 11:51:57 crc kubenswrapper[4797]: I0104 11:51:57.528129 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.233026 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.311198 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content\") pod \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.311375 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8jtm\" (UniqueName: \"kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm\") pod \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.311420 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities\") pod \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\" (UID: \"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df\") " Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.312147 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities" (OuterVolumeSpecName: "utilities") pod "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" (UID: "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.321183 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm" (OuterVolumeSpecName: "kube-api-access-r8jtm") pod "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" (UID: "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df"). InnerVolumeSpecName "kube-api-access-r8jtm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.362376 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" (UID: "0ccf58b6-d6dc-4830-b997-0ab63ee5e6df"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.412784 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8jtm\" (UniqueName: \"kubernetes.io/projected/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-kube-api-access-r8jtm\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.412817 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.412826 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.491368 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerStarted","Data":"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54"} Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.493600 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5b2cz" event={"ID":"0ccf58b6-d6dc-4830-b997-0ab63ee5e6df","Type":"ContainerDied","Data":"099da130895470cdea423958ed559fd4d3d05e6cfea2918425de5c718c1273ba"} Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.493639 4797 scope.go:117] "RemoveContainer" containerID="c1e5af264f6b8ec1e6517b4c87cc8ea83972f6a09dedf6b966a4558cc8725af7" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.493660 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5b2cz" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.529382 4797 scope.go:117] "RemoveContainer" containerID="84ad83a86cc3eb7d3db96b8df6a85f2713cfa62ce6888c915c69d8ece3687561" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.529884 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bz6gm" podStartSLOduration=4.29147966 podStartE2EDuration="1m14.529856604s" podCreationTimestamp="2026-01-04 11:50:44 +0000 UTC" firstStartedPulling="2026-01-04 11:50:46.654972574 +0000 UTC m=+145.512159283" lastFinishedPulling="2026-01-04 11:51:56.893349518 +0000 UTC m=+215.750536227" observedRunningTime="2026-01-04 11:51:58.50830138 +0000 UTC m=+217.365488089" watchObservedRunningTime="2026-01-04 11:51:58.529856604 +0000 UTC m=+217.387043333" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.546396 4797 scope.go:117] "RemoveContainer" containerID="a6002a14f1c848d275b8dfd66747e90a90a9de19da9e471e1dffad790e113097" Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.547666 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5b2cz"] Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.550418 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5b2cz"] Jan 04 11:51:58 crc kubenswrapper[4797]: I0104 11:51:58.900884 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:51:59 crc kubenswrapper[4797]: I0104 11:51:59.484947 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" path="/var/lib/kubelet/pods/0ccf58b6-d6dc-4830-b997-0ab63ee5e6df/volumes" Jan 04 11:51:59 crc kubenswrapper[4797]: I0104 11:51:59.500805 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zpvl4" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="registry-server" containerID="cri-o://18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba" gracePeriod=2 Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.008714 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.039643 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities\") pod \"99d3aa96-c341-4adc-ae7f-93d067347315\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.039975 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content\") pod \"99d3aa96-c341-4adc-ae7f-93d067347315\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.040033 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pphcd\" (UniqueName: \"kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd\") pod \"99d3aa96-c341-4adc-ae7f-93d067347315\" (UID: \"99d3aa96-c341-4adc-ae7f-93d067347315\") " Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.040866 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities" (OuterVolumeSpecName: "utilities") pod "99d3aa96-c341-4adc-ae7f-93d067347315" (UID: "99d3aa96-c341-4adc-ae7f-93d067347315"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.064126 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99d3aa96-c341-4adc-ae7f-93d067347315" (UID: "99d3aa96-c341-4adc-ae7f-93d067347315"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.134375 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd" (OuterVolumeSpecName: "kube-api-access-pphcd") pod "99d3aa96-c341-4adc-ae7f-93d067347315" (UID: "99d3aa96-c341-4adc-ae7f-93d067347315"). InnerVolumeSpecName "kube-api-access-pphcd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.141602 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.141636 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99d3aa96-c341-4adc-ae7f-93d067347315-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.141647 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pphcd\" (UniqueName: \"kubernetes.io/projected/99d3aa96-c341-4adc-ae7f-93d067347315-kube-api-access-pphcd\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.509015 4797 generic.go:334] "Generic (PLEG): container finished" podID="99d3aa96-c341-4adc-ae7f-93d067347315" containerID="18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba" exitCode=0 Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.509054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerDied","Data":"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba"} Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.509063 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zpvl4" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.509079 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zpvl4" event={"ID":"99d3aa96-c341-4adc-ae7f-93d067347315","Type":"ContainerDied","Data":"aba8a513998fa5f924a944b3f2cc1a18d2b5c69fe2042b27ea5573a7a4fc1b80"} Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.509136 4797 scope.go:117] "RemoveContainer" containerID="18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.532415 4797 scope.go:117] "RemoveContainer" containerID="72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.537732 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.543666 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zpvl4"] Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.560518 4797 scope.go:117] "RemoveContainer" containerID="f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.577533 4797 scope.go:117] "RemoveContainer" containerID="18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba" Jan 04 11:52:00 crc kubenswrapper[4797]: E0104 11:52:00.577925 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba\": container with ID starting with 18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba not found: ID does not exist" containerID="18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.578054 4797 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba"} err="failed to get container status \"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba\": rpc error: code = NotFound desc = could not find container \"18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba\": container with ID starting with 18dc23ed026cb3bf2383b7d5e4ca4ec51f5054777eeb1bbb45afd9c35afa96ba not found: ID does not exist" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.578094 4797 scope.go:117] "RemoveContainer" containerID="72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de" Jan 04 11:52:00 crc kubenswrapper[4797]: E0104 11:52:00.578540 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de\": container with ID starting with 72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de not found: ID does not exist" containerID="72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.578576 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de"} err="failed to get container status \"72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de\": rpc error: code = NotFound desc = could not find container \"72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de\": container with ID starting with 72022e37a1f5456fc5ee53379ebc4c2c4c1ada776d505ca5d9f3033d368827de not found: ID does not exist" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.578599 4797 scope.go:117] "RemoveContainer" containerID="f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2" Jan 04 11:52:00 crc kubenswrapper[4797]: E0104 11:52:00.578948 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2\": container with ID starting with f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2 not found: ID does not exist" containerID="f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2" Jan 04 11:52:00 crc kubenswrapper[4797]: I0104 11:52:00.579011 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2"} err="failed to get container status \"f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2\": rpc error: code = NotFound desc = could not find container \"f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2\": container with ID starting with f6b27367cb4048376fc69fa10e059fce67571c807567cf1236999c3f7a9dd1b2 not found: ID does not exist" Jan 04 11:52:01 crc kubenswrapper[4797]: I0104 11:52:01.308563 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"] Jan 04 11:52:01 crc kubenswrapper[4797]: I0104 11:52:01.308934 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ck5tb" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="registry-server" containerID="cri-o://bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec" gracePeriod=2 Jan 04 11:52:01 
Jan 04 11:52:01 crc kubenswrapper[4797]: I0104 11:52:01.483182 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" path="/var/lib/kubelet/pods/99d3aa96-c341-4adc-ae7f-93d067347315/volumes"
Jan 04 11:52:02 crc kubenswrapper[4797]: I0104 11:52:02.598519 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"]
Jan 04 11:52:02 crc kubenswrapper[4797]: I0104 11:52:02.598827 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" podUID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" containerName="controller-manager" containerID="cri-o://531c875e523ee845c04ba43b614c4c770ea99c2bf2b6b2b0e9b358a78347254b" gracePeriod=30
Jan 04 11:52:02 crc kubenswrapper[4797]: I0104 11:52:02.691936 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"]
Jan 04 11:52:02 crc kubenswrapper[4797]: I0104 11:52:02.692476 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" podUID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" containerName="route-controller-manager" containerID="cri-o://7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d" gracePeriod=30
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.055865 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.055942 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.142710 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.445819 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473573 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"]
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473823 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="extract-utilities"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473837 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="extract-utilities"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473855 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" containerName="route-controller-manager"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473863 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" containerName="route-controller-manager"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473872 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="extract-content"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473880 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="extract-content"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473892 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="extract-utilities"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473899 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="extract-utilities"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473912 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473919 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473935 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0daf140-d426-4fa0-a17c-8bfe93b120c4" containerName="pruner"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473943 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0daf140-d426-4fa0-a17c-8bfe93b120c4" containerName="pruner"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473953 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473962 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.473971 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="extract-content"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.473980 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="extract-content"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.474116 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0daf140-d426-4fa0-a17c-8bfe93b120c4" containerName="pruner"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.474130 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ccf58b6-d6dc-4830-b997-0ab63ee5e6df" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.474143 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d3aa96-c341-4adc-ae7f-93d067347315" containerName="registry-server"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.474157 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" containerName="route-controller-manager"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.474737 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.486421 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.487418 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bz6gm"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500396 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca\") pod \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") "
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500451 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2lkw\" (UniqueName: \"kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw\") pod \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") "
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500488 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-serving-cert\") pod \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") "
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500514 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config\") pod \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\" (UID: \"7ed19471-bc8e-422c-a4fb-0d0c9efbb221\") "
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500726 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500762 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500806 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.500839 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrn7n\" (UniqueName: \"kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.501189 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca" (OuterVolumeSpecName: "client-ca") pod "7ed19471-bc8e-422c-a4fb-0d0c9efbb221" (UID: "7ed19471-bc8e-422c-a4fb-0d0c9efbb221"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.502112 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"]
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.502690 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config" (OuterVolumeSpecName: "config") pod "7ed19471-bc8e-422c-a4fb-0d0c9efbb221" (UID: "7ed19471-bc8e-422c-a4fb-0d0c9efbb221"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.507231 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw" (OuterVolumeSpecName: "kube-api-access-z2lkw") pod "7ed19471-bc8e-422c-a4fb-0d0c9efbb221" (UID: "7ed19471-bc8e-422c-a4fb-0d0c9efbb221"). InnerVolumeSpecName "kube-api-access-z2lkw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.508110 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck5tb"
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.541550 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bz6gm" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.546346 4797 generic.go:334] "Generic (PLEG): container finished" podID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" containerID="7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d" exitCode=0 Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.546398 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" event={"ID":"7ed19471-bc8e-422c-a4fb-0d0c9efbb221","Type":"ContainerDied","Data":"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d"} Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.546421 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" event={"ID":"7ed19471-bc8e-422c-a4fb-0d0c9efbb221","Type":"ContainerDied","Data":"4a1c634caf8fc8092c2fad86ee6292006d94ec1e8083eef788df2b5562139b30"} Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.546436 4797 scope.go:117] "RemoveContainer" containerID="7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.546519 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.550532 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.550973 4797 generic.go:334] "Generic (PLEG): container finished" podID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" containerID="531c875e523ee845c04ba43b614c4c770ea99c2bf2b6b2b0e9b358a78347254b" exitCode=0 Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.551024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" event={"ID":"d922cdfa-0bf5-4f0e-9bff-9932bce71959","Type":"ContainerDied","Data":"531c875e523ee845c04ba43b614c4c770ea99c2bf2b6b2b0e9b358a78347254b"} Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.555755 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ck5tb" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.556413 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerDied","Data":"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec"} Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.556342 4797 generic.go:334] "Generic (PLEG): container finished" podID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerID="bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec" exitCode=0 Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.557402 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck5tb" event={"ID":"c698bb8a-7f5e-40c4-b757-685b34fbe709","Type":"ContainerDied","Data":"760651836a3667be4927f7510b29c49d2675976991970654277084603ef98fd2"} Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.577326 4797 scope.go:117] "RemoveContainer" containerID="7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d" Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.582171 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d\": container with ID starting with 7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d not found: ID does not exist" containerID="7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.582436 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d"} err="failed to get container status \"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d\": rpc error: code = NotFound desc = could not find container \"7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d\": container with ID starting with 7ced1ba37b2f7a3249bb25d818acd168630de71f6423cbcbcd02ab766550472d not found: ID does not exist" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.582561 4797 scope.go:117] "RemoveContainer" containerID="531c875e523ee845c04ba43b614c4c770ea99c2bf2b6b2b0e9b358a78347254b" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.600577 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2wl2p" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.605479 4797 scope.go:117] "RemoveContainer" containerID="bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.609465 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bz6gm" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610260 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities\") pod \"c698bb8a-7f5e-40c4-b757-685b34fbe709\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610335 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config\") pod 
\"d922cdfa-0bf5-4f0e-9bff-9932bce71959\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610399 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca\") pod \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610427 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlcvm\" (UniqueName: \"kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm\") pod \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610470 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert\") pod \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610495 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content\") pod \"c698bb8a-7f5e-40c4-b757-685b34fbe709\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610531 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles\") pod \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\" (UID: \"d922cdfa-0bf5-4f0e-9bff-9932bce71959\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610600 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnplt\" (UniqueName: \"kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt\") pod \"c698bb8a-7f5e-40c4-b757-685b34fbe709\" (UID: \"c698bb8a-7f5e-40c4-b757-685b34fbe709\") " Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.610799 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611188 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611323 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc 
kubenswrapper[4797]: I0104 11:52:04.611375 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrn7n\" (UniqueName: \"kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611448 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611468 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2lkw\" (UniqueName: \"kubernetes.io/projected/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-kube-api-access-z2lkw\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611482 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.611494 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed19471-bc8e-422c-a4fb-0d0c9efbb221-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.612769 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca" (OuterVolumeSpecName: "client-ca") pod "d922cdfa-0bf5-4f0e-9bff-9932bce71959" (UID: "d922cdfa-0bf5-4f0e-9bff-9932bce71959"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.613233 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities" (OuterVolumeSpecName: "utilities") pod "c698bb8a-7f5e-40c4-b757-685b34fbe709" (UID: "c698bb8a-7f5e-40c4-b757-685b34fbe709"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.614472 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config" (OuterVolumeSpecName: "config") pod "d922cdfa-0bf5-4f0e-9bff-9932bce71959" (UID: "d922cdfa-0bf5-4f0e-9bff-9932bce71959"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.617020 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.617197 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm" (OuterVolumeSpecName: "kube-api-access-wlcvm") pod "d922cdfa-0bf5-4f0e-9bff-9932bce71959" (UID: "d922cdfa-0bf5-4f0e-9bff-9932bce71959"). 
InnerVolumeSpecName "kube-api-access-wlcvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.617574 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.617577 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d922cdfa-0bf5-4f0e-9bff-9932bce71959" (UID: "d922cdfa-0bf5-4f0e-9bff-9932bce71959"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.618245 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"] Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.620211 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.621229 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d922cdfa-0bf5-4f0e-9bff-9932bce71959" (UID: "d922cdfa-0bf5-4f0e-9bff-9932bce71959"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.621383 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-645999796d-kb4wm"] Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.632037 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt" (OuterVolumeSpecName: "kube-api-access-mnplt") pod "c698bb8a-7f5e-40c4-b757-685b34fbe709" (UID: "c698bb8a-7f5e-40c4-b757-685b34fbe709"). InnerVolumeSpecName "kube-api-access-mnplt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.638219 4797 scope.go:117] "RemoveContainer" containerID="856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.638630 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrn7n\" (UniqueName: \"kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n\") pod \"route-controller-manager-77456d887c-n8frk\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") " pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.654836 4797 scope.go:117] "RemoveContainer" containerID="63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.667654 4797 scope.go:117] "RemoveContainer" containerID="bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec" Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.668232 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec\": container with ID starting with bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec not found: ID does not exist" containerID="bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.668288 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec"} err="failed to get container status \"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec\": rpc error: code = NotFound desc = could not find container \"bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec\": container with ID starting with bffcd5c79b4886f5d6880068b9f8b60eb8dc7d2b82ccf6bd60c3ea101215d9ec not found: ID does not exist" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.668318 4797 scope.go:117] "RemoveContainer" containerID="856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5" Jan 04 11:52:04 crc kubenswrapper[4797]: E0104 11:52:04.668626 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5\": container with ID starting with 856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5 not found: ID does not exist" containerID="856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.668657 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5"} err="failed to get container status \"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5\": rpc error: code = NotFound desc = could not find container \"856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5\": container with ID starting with 856fd8659233cb92374438ea7f6fbf4e263ef88572927dd22b46d2c19385b1c5 not found: ID does not exist" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.668682 4797 scope.go:117] "RemoveContainer" containerID="63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae" Jan 04 11:52:04 crc kubenswrapper[4797]: 
E0104 11:52:04.668903 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae\": container with ID starting with 63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae not found: ID does not exist" containerID="63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.668921 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae"} err="failed to get container status \"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae\": rpc error: code = NotFound desc = could not find container \"63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae\": container with ID starting with 63c01ae91d25352b832ba0edefb37775cb56099199537ac4e5990346eef888ae not found: ID does not exist" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712654 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d922cdfa-0bf5-4f0e-9bff-9932bce71959-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712676 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712686 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnplt\" (UniqueName: \"kubernetes.io/projected/c698bb8a-7f5e-40c4-b757-685b34fbe709-kube-api-access-mnplt\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712696 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712703 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712712 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d922cdfa-0bf5-4f0e-9bff-9932bce71959-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.712720 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlcvm\" (UniqueName: \"kubernetes.io/projected/d922cdfa-0bf5-4f0e-9bff-9932bce71959-kube-api-access-wlcvm\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.753890 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c698bb8a-7f5e-40c4-b757-685b34fbe709" (UID: "c698bb8a-7f5e-40c4-b757-685b34fbe709"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.813863 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c698bb8a-7f5e-40c4-b757-685b34fbe709-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.842282 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.884203 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"] Jan 04 11:52:04 crc kubenswrapper[4797]: I0104 11:52:04.888907 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ck5tb"] Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.103347 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"] Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.362167 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"] Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.487891 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ed19471-bc8e-422c-a4fb-0d0c9efbb221" path="/var/lib/kubelet/pods/7ed19471-bc8e-422c-a4fb-0d0c9efbb221/volumes" Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.488830 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" path="/var/lib/kubelet/pods/c698bb8a-7f5e-40c4-b757-685b34fbe709/volumes" Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.564662 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" event={"ID":"d922cdfa-0bf5-4f0e-9bff-9932bce71959","Type":"ContainerDied","Data":"dc4826a97f93ce0f817d61708ba63305142a4642b43ff5e914e52c92e8f034d6"} Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.564778 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7547fdc57b-hv6ts" Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.577173 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" event={"ID":"56b77313-425b-4096-bfa8-6e9c7fffa300","Type":"ContainerStarted","Data":"631dac42ec3ef117000821fa4d1efc7ca96980bb08938975d3003fd912fe90a0"} Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.598397 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"] Jan 04 11:52:05 crc kubenswrapper[4797]: I0104 11:52:05.604254 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7547fdc57b-hv6ts"] Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.252423 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.252752 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.331394 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.590044 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" event={"ID":"56b77313-425b-4096-bfa8-6e9c7fffa300","Type":"ContainerStarted","Data":"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320"} Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.591082 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bz6gm" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="registry-server" containerID="cri-o://f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54" gracePeriod=2 Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.625746 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" podStartSLOduration=4.62571234 podStartE2EDuration="4.62571234s" podCreationTimestamp="2026-01-04 11:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:06.617111545 +0000 UTC m=+225.474298334" watchObservedRunningTime="2026-01-04 11:52:06.62571234 +0000 UTC m=+225.482899089" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.663672 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m2j8n" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.882364 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"] Jan 04 11:52:06 crc kubenswrapper[4797]: E0104 11:52:06.882910 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="extract-utilities" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.882980 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="extract-utilities" Jan 04 11:52:06 crc kubenswrapper[4797]: E0104 
11:52:06.883072 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="registry-server" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.883088 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="registry-server" Jan 04 11:52:06 crc kubenswrapper[4797]: E0104 11:52:06.883158 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="extract-content" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.883174 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="extract-content" Jan 04 11:52:06 crc kubenswrapper[4797]: E0104 11:52:06.883192 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" containerName="controller-manager" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.883244 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" containerName="controller-manager" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.883562 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" containerName="controller-manager" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.883636 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c698bb8a-7f5e-40c4-b757-685b34fbe709" containerName="registry-server" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.884703 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.894125 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.894394 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.894908 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.895195 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.895363 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.895418 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.901306 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"] Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.910277 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.946754 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles\") pod 
\"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.946899 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.946964 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.947018 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:06 crc kubenswrapper[4797]: I0104 11:52:06.947067 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dtpz\" (UniqueName: \"kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.048039 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.048129 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.048170 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.048235 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dtpz\" (UniqueName: \"kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " 
pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.048293 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.049644 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.050189 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.050745 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.080737 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dtpz\" (UniqueName: \"kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.084467 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert\") pod \"controller-manager-6797fc497-lm766\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") " pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.208673 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bz6gm" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.215922 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.249752 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities\") pod \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.249797 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content\") pod \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.249890 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25scc\" (UniqueName: \"kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc\") pod \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\" (UID: \"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b\") " Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.251333 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities" (OuterVolumeSpecName: "utilities") pod "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" (UID: "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.253446 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc" (OuterVolumeSpecName: "kube-api-access-25scc") pod "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" (UID: "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b"). InnerVolumeSpecName "kube-api-access-25scc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.327804 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" (UID: "9263a31b-4c9c-4afb-8ff8-d4b5d1da489b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.351857 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.351899 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.351916 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25scc\" (UniqueName: \"kubernetes.io/projected/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b-kube-api-access-25scc\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.436273 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"] Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.489059 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d922cdfa-0bf5-4f0e-9bff-9932bce71959" path="/var/lib/kubelet/pods/d922cdfa-0bf5-4f0e-9bff-9932bce71959/volumes" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.596493 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" event={"ID":"182dae25-4f4a-4547-99a9-4c1bb50d31ff","Type":"ContainerStarted","Data":"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde"} Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.596534 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" event={"ID":"182dae25-4f4a-4547-99a9-4c1bb50d31ff","Type":"ContainerStarted","Data":"987656d4c1b9851cebbd373c6c0b9f6b1cd921a1edf8296d5104ccb43ed6fe62"} Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.596710 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.598168 4797 patch_prober.go:28] interesting pod/controller-manager-6797fc497-lm766 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.598208 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.598831 4797 generic.go:334] "Generic (PLEG): container finished" podID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerID="f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54" exitCode=0 Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.599406 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bz6gm" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.599747 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerDied","Data":"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54"} Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.599777 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.599789 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bz6gm" event={"ID":"9263a31b-4c9c-4afb-8ff8-d4b5d1da489b","Type":"ContainerDied","Data":"167566ca4939bf50dcfa1576aaddfaecc581cc46a3b973b7d6427aaae9e9e6c8"} Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.600159 4797 scope.go:117] "RemoveContainer" containerID="f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.605285 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.612611 4797 scope.go:117] "RemoveContainer" containerID="008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.622527 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" podStartSLOduration=5.622512196 podStartE2EDuration="5.622512196s" podCreationTimestamp="2026-01-04 11:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:07.621830858 +0000 UTC m=+226.479017567" watchObservedRunningTime="2026-01-04 11:52:07.622512196 +0000 UTC m=+226.479698905" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.627599 4797 scope.go:117] "RemoveContainer" containerID="d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.633235 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"] Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.636680 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bz6gm"] Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.645467 4797 scope.go:117] "RemoveContainer" containerID="f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54" Jan 04 11:52:07 crc kubenswrapper[4797]: E0104 11:52:07.645891 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54\": container with ID starting with f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54 not found: ID does not exist" containerID="f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.645917 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54"} err="failed to get container status 
\"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54\": rpc error: code = NotFound desc = could not find container \"f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54\": container with ID starting with f0f6f2703d49380898a43841e20b2f76a4980c24571e95f317ff10500b1dcf54 not found: ID does not exist" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.645939 4797 scope.go:117] "RemoveContainer" containerID="008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870" Jan 04 11:52:07 crc kubenswrapper[4797]: E0104 11:52:07.646217 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870\": container with ID starting with 008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870 not found: ID does not exist" containerID="008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.646235 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870"} err="failed to get container status \"008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870\": rpc error: code = NotFound desc = could not find container \"008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870\": container with ID starting with 008fb01ecf52811da8691f573bcf669ce5756dc2e6198ee7f0b7d38506be4870 not found: ID does not exist" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.646247 4797 scope.go:117] "RemoveContainer" containerID="d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac" Jan 04 11:52:07 crc kubenswrapper[4797]: E0104 11:52:07.646599 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac\": container with ID starting with d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac not found: ID does not exist" containerID="d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac" Jan 04 11:52:07 crc kubenswrapper[4797]: I0104 11:52:07.646617 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac"} err="failed to get container status \"d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac\": rpc error: code = NotFound desc = could not find container \"d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac\": container with ID starting with d3422757c63c1491ebad24c5e69c3aa5320fcdd80c05f2adbbd04237346abaac not found: ID does not exist" Jan 04 11:52:08 crc kubenswrapper[4797]: I0104 11:52:08.618546 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:52:09 crc kubenswrapper[4797]: I0104 11:52:09.482534 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" path="/var/lib/kubelet/pods/9263a31b-4c9c-4afb-8ff8-d4b5d1da489b/volumes" Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.191849 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" containerName="oauth-openshift" 
containerID="cri-o://258b05a7cba49800d73d805c1a69040f30359db0e2c763a584aefa8ce617c66b" gracePeriod=15 Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.648320 4797 generic.go:334] "Generic (PLEG): container finished" podID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" containerID="258b05a7cba49800d73d805c1a69040f30359db0e2c763a584aefa8ce617c66b" exitCode=0 Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.648548 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" event={"ID":"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0","Type":"ContainerDied","Data":"258b05a7cba49800d73d805c1a69040f30359db0e2c763a584aefa8ce617c66b"} Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.779036 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959782 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959875 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959915 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959940 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959962 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.959981 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960008 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") " Jan 04 
11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960024 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960044 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzvmp\" (UniqueName: \"kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960062 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960093 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960121 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960138 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960160 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert\") pod \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\" (UID: \"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0\") "
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.960718 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.961060 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.961082 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.961374 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.961679 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.966245 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.966476 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.966579 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp" (OuterVolumeSpecName: "kube-api-access-rzvmp") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "kube-api-access-rzvmp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.966977 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.967257 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.967542 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.967761 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.971803 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:13 crc kubenswrapper[4797]: I0104 11:52:13.977409 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" (UID: "8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002197 4797 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.002448 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="extract-content"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002460 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="extract-content"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.002475 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" containerName="oauth-openshift"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002480 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" containerName="oauth-openshift"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.002491 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="registry-server"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002497 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="registry-server"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.002508 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="extract-utilities"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002514 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="extract-utilities"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002598 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="9263a31b-4c9c-4afb-8ff8-d4b5d1da489b" containerName="registry-server"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.002612 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" containerName="oauth-openshift"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.003077 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.003423 4797 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.003856 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4" gracePeriod=15
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.003893 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e" gracePeriod=15
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.004042 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd" gracePeriod=15
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.003935 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546" gracePeriod=15
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.004107 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7" gracePeriod=15
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006152 4797 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006428 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006441 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006481 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006515 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006526 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006533 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006544 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006549 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006562 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006567 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Jan 04 11:52:14 crc kubenswrapper[4797]: E0104 11:52:14.006576 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006581 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006671 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006682 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006718 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006727 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.006736 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061652 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061689 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061711 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061727 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061761 4797 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061787 4797 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061802 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061817 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzvmp\" (UniqueName: \"kubernetes.io/projected/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-kube-api-access-rzvmp\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061846 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061955 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061968 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.061978 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.062021 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.062031 4797 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.171795 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.171843 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.171884 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.171906 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.172022 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.172054 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.172074 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.172112 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273419 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273479 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273523 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273546 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273642 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273658 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273807 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273955 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.274016 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273686 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273662 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.274162 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273684 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.274203 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.273630 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.274228 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.655950 4797 generic.go:334] "Generic (PLEG): container finished" podID="afa53643-f607-45d0-b4f4-807edcd89c74" containerID="67e9885b7253d638e70b92fc0c91daff61317b0b3a900542debf162cbd5003c6" exitCode=0
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.656059 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"afa53643-f607-45d0-b4f4-807edcd89c74","Type":"ContainerDied","Data":"67e9885b7253d638e70b92fc0c91daff61317b0b3a900542debf162cbd5003c6"}
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.657057 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.657425 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.659380 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.660241 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546" exitCode=0
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.660285 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e" exitCode=0
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.660304 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7" exitCode=0
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.660320 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd" exitCode=2
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.661932 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" event={"ID":"8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0","Type":"ContainerDied","Data":"e4c43fd65255ca0bdf279b0e777cfd6c08b4c13120719822924ded12c0bfa312"}
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.662007 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.662020 4797 scope.go:117] "RemoveContainer" containerID="258b05a7cba49800d73d805c1a69040f30359db0e2c763a584aefa8ce617c66b"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.664130 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.664787 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.665466 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.680515 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.681099 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:14 crc kubenswrapper[4797]: I0104 11:52:14.681505 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.111981 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.113298 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.113752 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.303689 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access\") pod \"afa53643-f607-45d0-b4f4-807edcd89c74\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.303752 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir\") pod \"afa53643-f607-45d0-b4f4-807edcd89c74\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.303772 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock\") pod \"afa53643-f607-45d0-b4f4-807edcd89c74\" (UID: \"afa53643-f607-45d0-b4f4-807edcd89c74\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.303834 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "afa53643-f607-45d0-b4f4-807edcd89c74" (UID: "afa53643-f607-45d0-b4f4-807edcd89c74"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.303906 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock" (OuterVolumeSpecName: "var-lock") pod "afa53643-f607-45d0-b4f4-807edcd89c74" (UID: "afa53643-f607-45d0-b4f4-807edcd89c74"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.304076 4797 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.304088 4797 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/afa53643-f607-45d0-b4f4-807edcd89c74-var-lock\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.308100 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "afa53643-f607-45d0-b4f4-807edcd89c74" (UID: "afa53643-f607-45d0-b4f4-807edcd89c74"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.365688 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.366737 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.367302 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.367777 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.368194 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.405493 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afa53643-f607-45d0-b4f4-807edcd89c74-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.506591 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.506759 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507398 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507533 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507575 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507736 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507943 4797 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507968 4797 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.507981 4797 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.681924 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.682874 4797 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4" exitCode=0
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.683069 4797 scope.go:117] "RemoveContainer" containerID="6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.683082 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.685555 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"afa53643-f607-45d0-b4f4-807edcd89c74","Type":"ContainerDied","Data":"5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3"}
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.685607 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ba09a53fd0f4f2313010a207c61b6a1eb622f2a3f9eca790165c6253fdee9e3"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.685704 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.704355 4797 scope.go:117] "RemoveContainer" containerID="1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.715829 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.716496 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.716981 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.717681 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.718176 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.718592 4797 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.725565 4797 scope.go:117] "RemoveContainer" containerID="3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.740871 4797 scope.go:117] "RemoveContainer" containerID="520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.757892 4797 scope.go:117] "RemoveContainer" containerID="1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.774018 4797 scope.go:117] "RemoveContainer" containerID="c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.796896 4797 scope.go:117] "RemoveContainer" containerID="6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.797649 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\": container with ID starting with 6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546 not found: ID does not exist" containerID="6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.797714 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546"} err="failed to get container status \"6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\": rpc error: code = NotFound desc = could not find container \"6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546\": container with ID starting with 6286650b3ef2597a18fab1c0bd8cb1b5688cf6cf3bf1c9212badbd2ccb696546 not found: ID does not exist"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.797761 4797 scope.go:117] "RemoveContainer" containerID="1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.798201 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\": container with ID starting with 1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e not found: ID does not exist" containerID="1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.798250 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e"} err="failed to get container status \"1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\": rpc error: code = NotFound desc = could not find container \"1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e\": container with ID starting with 1e57eeadd69820839f53664072465d72de44d1e3c89dbf0a60e080af8205cd0e not found: ID does not exist"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.798285 4797 scope.go:117] "RemoveContainer" containerID="3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.798615 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\": container with ID starting with 3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7 not found: ID does not exist" containerID="3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.798661 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7"} err="failed to get container status \"3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\": rpc error: code = NotFound desc = could not find container \"3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7\": container with ID starting with 3a2f58857b8d0da3cddc78f906374142df85154a39d3f9b89b450e33b3a1a8f7 not found: ID does not exist"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.798702 4797 scope.go:117] "RemoveContainer" containerID="520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.799184 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\": container with ID starting with 520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd not found: ID does not exist" containerID="520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.799225 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd"} err="failed to get container status \"520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\": rpc error: code = NotFound desc = could not find container \"520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd\": container with ID starting with 520efa11198d484f515c07bfccdf92a127eae7614613ce4346ce146b786991dd not found: ID does not exist"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.799251 4797 scope.go:117] "RemoveContainer" containerID="1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.799627 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\": container with ID starting with 1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4 not found: ID does not exist" containerID="1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.799725 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4"} err="failed to get container status \"1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\": rpc error: code = NotFound desc = could not find container \"1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4\": container with ID starting with 1ad3addb53ea7dc5ec6db399afe773f5b6391c411000913775e471215ba05bd4 not found: ID does not exist"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.799754 4797 scope.go:117] "RemoveContainer" containerID="c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07"
Jan 04 11:52:16 crc kubenswrapper[4797]: E0104 11:52:16.802073 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\": container with ID starting with c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07 not found: ID does not exist" containerID="c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07"
Jan 04 11:52:16 crc kubenswrapper[4797]: I0104 11:52:16.802119 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07"} err="failed to get container status \"c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\": rpc error: code = NotFound desc = could not find container \"c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07\": container with ID starting with c4327fab8e33cf851e814fc762cd3dfda284a6d10b9ce5f90d23063419262d07 not found: ID does not exist"
Jan 04 11:52:17 crc kubenswrapper[4797]: I0104 11:52:17.495588 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Jan 04 11:52:17 crc kubenswrapper[4797]: E0104 11:52:17.520824 4797 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" volumeName="registry-storage"
Jan 04 11:52:19 crc kubenswrapper[4797]: E0104 11:52:19.031917 4797 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:19 crc kubenswrapper[4797]: I0104 11:52:19.032448 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:19 crc kubenswrapper[4797]: W0104 11:52:19.070412 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-270037d3aa18721e372257e9851390038109a582fdea691bce8040f58a4eda9a WatchSource:0}: Error finding container 270037d3aa18721e372257e9851390038109a582fdea691bce8040f58a4eda9a: Status 404 returned error can't find the container with id 270037d3aa18721e372257e9851390038109a582fdea691bce8040f58a4eda9a
Jan 04 11:52:19 crc kubenswrapper[4797]: E0104 11:52:19.076102 4797 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188784ee994c347f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:52:19.075200127 +0000 UTC m=+237.932386866,LastTimestamp:2026-01-04 11:52:19.075200127 +0000 UTC m=+237.932386866,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:52:19 crc kubenswrapper[4797]: I0104 11:52:19.708112 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90"}
Jan 04 11:52:19 crc kubenswrapper[4797]: I0104 11:52:19.708163 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"270037d3aa18721e372257e9851390038109a582fdea691bce8040f58a4eda9a"}
Jan 04 11:52:19 crc kubenswrapper[4797]: E0104 11:52:19.708895 4797 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:52:19 crc kubenswrapper[4797]: I0104 11:52:19.709100 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:19 crc kubenswrapper[4797]: I0104 11:52:19.709607 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:21 crc kubenswrapper[4797]: I0104 11:52:21.482347 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:21 crc kubenswrapper[4797]: I0104 11:52:21.483696 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:21 crc kubenswrapper[4797]: E0104 11:52:21.962223 4797 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188784ee994c347f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:52:19.075200127 +0000 UTC m=+237.932386866,LastTimestamp:2026-01-04 11:52:19.075200127 +0000 UTC m=+237.932386866,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.764750 4797 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.765757 4797 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.766454 4797 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.767077 4797 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.767624 4797 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:22 crc kubenswrapper[4797]: I0104 11:52:22.767659 4797 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.767975 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="200ms"
Jan 04 11:52:22 crc kubenswrapper[4797]: E0104 11:52:22.969110 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="400ms"
Jan 04 11:52:23 crc kubenswrapper[4797]: E0104 11:52:23.371586 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="800ms"
Jan 04 11:52:24 crc kubenswrapper[4797]: E0104 11:52:24.172290 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="1.6s"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.473682 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.474351 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.475046 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.486936 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.486967 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c"
Jan 04 11:52:24 crc kubenswrapper[4797]: E0104 11:52:24.487411 4797 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.487877 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:24 crc kubenswrapper[4797]: I0104 11:52:24.740564 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9a91f68246cd406971f08a076a032a669a5765e942988fe469361e518d1e20b3"}
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.750738 4797 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="f8b4830585c5276d50c280094bc32a6d7ebf13b1e1b35ece5dbfd434ac8be600" exitCode=0
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.750804 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"f8b4830585c5276d50c280094bc32a6d7ebf13b1e1b35ece5dbfd434ac8be600"}
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.751201 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c"
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.751240 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c"
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.751602 4797 status_manager.go:851] "Failed to get status for pod" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" pod="openshift-authentication/oauth-openshift-558db77b4-pj8fz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pj8fz\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:25 crc kubenswrapper[4797]: E0104 11:52:25.751810 4797 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:25 crc kubenswrapper[4797]: I0104 11:52:25.752123 4797 status_manager.go:851] "Failed to get status for pod" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused"
Jan 04 11:52:25 crc kubenswrapper[4797]: E0104 11:52:25.774511 4797 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="3.2s"
Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.759742 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cbf5459b6229cd2d0db0b26dfa4b46ba4e45cc63b109aecf9c4e7d95a84c35a9"}
Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.760097 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"39b70183e532b71629ef65dc1b9cc3bd511f7f17ac31cbb05e59d09ddabd0148"}
Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.760112 4797 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f2e7eb208c4d5724998961205d7f86234dbd0a887b283ee4dddbb356755029c8"} Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.764788 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.764843 4797 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f" exitCode=1 Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.764874 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f"} Jan 04 11:52:26 crc kubenswrapper[4797]: I0104 11:52:26.765326 4797 scope.go:117] "RemoveContainer" containerID="0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f" Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.775447 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.776236 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"557fa4c45b7dc26914a024ddbcff562299ac8089e8137b2cfe3f8675506f5dab"} Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.779749 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c2258342bc25492f7b9738895b05e06b049e36d48080b7bd1d36bded2d8491da"} Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.779779 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4da4c7ae19f48ebd3658ca19be6c5bb5fe4372e3fd9230da14c5865e31c12d62"} Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.780074 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.780191 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:27 crc kubenswrapper[4797]: I0104 11:52:27.780225 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:29 crc kubenswrapper[4797]: I0104 11:52:29.488424 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:29 crc kubenswrapper[4797]: I0104 11:52:29.489797 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:29 crc kubenswrapper[4797]: I0104 11:52:29.497700 4797 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:31 crc kubenswrapper[4797]: I0104 11:52:31.769643 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:52:31 crc kubenswrapper[4797]: I0104 11:52:31.772617 4797 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 04 11:52:31 crc kubenswrapper[4797]: I0104 11:52:31.772710 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 04 11:52:32 crc kubenswrapper[4797]: I0104 11:52:32.827519 4797 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:32 crc kubenswrapper[4797]: I0104 11:52:32.928115 4797 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="bf1b7c8f-0c57-4bc7-8c88-3eb2b1a671eb" Jan 04 11:52:33 crc kubenswrapper[4797]: I0104 11:52:33.819566 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:33 crc kubenswrapper[4797]: I0104 11:52:33.819607 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:33 crc kubenswrapper[4797]: I0104 11:52:33.823769 4797 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="bf1b7c8f-0c57-4bc7-8c88-3eb2b1a671eb" Jan 04 11:52:33 crc kubenswrapper[4797]: I0104 11:52:33.827343 4797 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://f2e7eb208c4d5724998961205d7f86234dbd0a887b283ee4dddbb356755029c8" Jan 04 11:52:33 crc kubenswrapper[4797]: I0104 11:52:33.827389 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:34 crc kubenswrapper[4797]: I0104 11:52:34.828206 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:34 crc kubenswrapper[4797]: I0104 11:52:34.828253 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:34 crc kubenswrapper[4797]: I0104 11:52:34.833141 4797 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="bf1b7c8f-0c57-4bc7-8c88-3eb2b1a671eb" Jan 04 11:52:35 crc kubenswrapper[4797]: I0104 11:52:35.241502 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:52:39 crc kubenswrapper[4797]: I0104 11:52:39.231866 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 04 11:52:39 crc kubenswrapper[4797]: I0104 11:52:39.574583 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 04 11:52:39 crc kubenswrapper[4797]: I0104 11:52:39.707063 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 04 11:52:41 crc kubenswrapper[4797]: I0104 11:52:41.390794 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 04 11:52:41 crc kubenswrapper[4797]: I0104 11:52:41.770080 4797 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 04 11:52:41 crc kubenswrapper[4797]: I0104 11:52:41.770697 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 04 11:52:42 crc kubenswrapper[4797]: I0104 11:52:42.596422 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 04 11:52:42 crc kubenswrapper[4797]: I0104 11:52:42.638517 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 04 11:52:42 crc kubenswrapper[4797]: I0104 11:52:42.878203 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 04 11:52:42 crc kubenswrapper[4797]: I0104 11:52:42.987823 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 04 11:52:43 crc kubenswrapper[4797]: I0104 11:52:43.213161 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 04 11:52:44 crc kubenswrapper[4797]: I0104 11:52:44.009145 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 04 11:52:44 crc kubenswrapper[4797]: I0104 11:52:44.731780 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.129271 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.139413 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.252566 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 
11:52:45.293353 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.352324 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.515783 4797 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.525258 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pj8fz","openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.525364 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-4dkd6","openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:52:45 crc kubenswrapper[4797]: E0104 11:52:45.525706 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" containerName="installer" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.525738 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" containerName="installer" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.526042 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="afa53643-f607-45d0-b4f4-807edcd89c74" containerName="installer" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.527571 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.527782 4797 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.527837 4797 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="88b14f35-7706-4b2d-91bb-da0f0635076c" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.535167 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.535665 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.536188 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.536973 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537114 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537125 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537320 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537371 4797 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537403 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.537767 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.541520 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.542198 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.543291 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.546092 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.548877 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.564376 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.564352419 podStartE2EDuration="13.564352419s" podCreationTimestamp="2026-01-04 11:52:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:45.560550365 +0000 UTC m=+264.417737114" watchObservedRunningTime="2026-01-04 11:52:45.564352419 +0000 UTC m=+264.421539138" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.569214 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.584974 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585109 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585158 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " 
pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585292 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585343 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585469 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-policies\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585551 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njb47\" (UniqueName: \"kubernetes.io/projected/c212d715-7c7e-4064-8e39-68dbc24ae19d-kube-api-access-njb47\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585671 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-dir\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585782 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.585860 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.586035 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-session\") pod 
\"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.586121 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.586180 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.586247 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.672108 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.687890 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-session\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.687933 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.687954 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.687983 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " 
pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688021 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688038 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688056 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688076 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688093 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688116 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-policies\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688143 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njb47\" (UniqueName: \"kubernetes.io/projected/c212d715-7c7e-4064-8e39-68dbc24ae19d-kube-api-access-njb47\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688180 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-dir\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc 
kubenswrapper[4797]: I0104 11:52:45.688199 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688221 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.688546 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-dir\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.689091 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.689665 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-audit-policies\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.690099 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.690174 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-service-ca\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.695404 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-router-certs\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.695461 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.695759 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.695818 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-session\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.695977 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.696697 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.700653 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-error\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.700696 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c212d715-7c7e-4064-8e39-68dbc24ae19d-v4-0-config-user-template-login\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.724200 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njb47\" (UniqueName: \"kubernetes.io/projected/c212d715-7c7e-4064-8e39-68dbc24ae19d-kube-api-access-njb47\") pod \"oauth-openshift-7794f9c69b-4dkd6\" (UID: \"c212d715-7c7e-4064-8e39-68dbc24ae19d\") " pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.839631 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"] 
Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.860536 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.971339 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 04 11:52:45 crc kubenswrapper[4797]: I0104 11:52:45.976747 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.315262 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.381417 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.415917 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.521755 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.558341 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.901972 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.974420 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.980935 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.996465 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 04 11:52:46 crc kubenswrapper[4797]: I0104 11:52:46.999637 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.088358 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.404469 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.417127 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.482056 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0" path="/var/lib/kubelet/pods/8a37c4fd-56d2-4bbc-9d4b-7003aaf31ea0/volumes" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.510860 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.512690 4797 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.552336 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.555608 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.585788 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.722366 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.777349 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.936563 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 04 11:52:47 crc kubenswrapper[4797]: I0104 11:52:47.983419 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.062784 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.134065 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.197757 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.289439 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.367198 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.370573 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.604743 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.708677 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.928765 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 04 11:52:48 crc kubenswrapper[4797]: I0104 11:52:48.948046 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.063715 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.073235 4797 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.136230 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 04 11:52:49 crc kubenswrapper[4797]: E0104 11:52:49.152355 4797 log.go:32] "RunPodSandbox from runtime service failed" err=< Jan 04 11:52:49 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84" Netns:"/var/run/netns/918fa10e-f762-4947-b204-9a1a321e1de6" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found Jan 04 11:52:49 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 04 11:52:49 crc kubenswrapper[4797]: > Jan 04 11:52:49 crc kubenswrapper[4797]: E0104 11:52:49.152458 4797 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Jan 04 11:52:49 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84" Netns:"/var/run/netns/918fa10e-f762-4947-b204-9a1a321e1de6" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: 
SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found
Jan 04 11:52:49 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 04 11:52:49 crc kubenswrapper[4797]: > pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:49 crc kubenswrapper[4797]: E0104 11:52:49.152493 4797 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=<
Jan 04 11:52:49 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84" Netns:"/var/run/netns/918fa10e-f762-4947-b204-9a1a321e1de6" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found
Jan 04 11:52:49 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 04 11:52:49 crc kubenswrapper[4797]: > pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:49 crc kubenswrapper[4797]: E0104 11:52:49.152610 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-7794f9c69b-4dkd6_openshift-authentication(c212d715-7c7e-4064-8e39-68dbc24ae19d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-7794f9c69b-4dkd6_openshift-authentication(c212d715-7c7e-4064-8e39-68dbc24ae19d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84\\\" Netns:\\\"/var/run/netns/918fa10e-f762-4947-b204-9a1a321e1de6\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=9fe0de3d9a77ea07236d19cba0d5787679266bd6f759b924b3640fea1beafe84;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod \\\"oauth-openshift-7794f9c69b-4dkd6\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" podUID="c212d715-7c7e-4064-8e39-68dbc24ae19d"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.158088 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.228089 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.305457 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.318107 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.319146 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.336689 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.363059 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.411747 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.411823 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.427119 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.430324 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.521674 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.649443 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.652068 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.653695 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.690099 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.814034 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.884852 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.906760 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.911809 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.928118 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.946052 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.946744 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:49 crc kubenswrapper[4797]: I0104 11:52:49.988323 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.082547 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.095580 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.204176 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.241525 4797 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.258531 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.277067 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.279666 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.405932 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.439863 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.494082 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.509583 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.623912 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.689293 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.730245 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.952752 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.955768 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 04 11:52:50 crc kubenswrapper[4797]: I0104 11:52:50.960102 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.057607 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.132206 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.137121 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.157768 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.259468 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.335820 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.410088 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.518201 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.552390 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.678313 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.769863 4797 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.769920 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.769980 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.770819 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"557fa4c45b7dc26914a024ddbcff562299ac8089e8137b2cfe3f8675506f5dab"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.770974 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://557fa4c45b7dc26914a024ddbcff562299ac8089e8137b2cfe3f8675506f5dab" gracePeriod=30
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.815863 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.872896 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.884767 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 04 11:52:51 crc kubenswrapper[4797]: I0104 11:52:51.920082 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.023931 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.037966 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.040225 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.075036 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.094083 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.120797 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.247018 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.283289 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.313312 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.328888 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.421663 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.464074 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.569533 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.571816 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.578960 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.628692 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.673995 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.678874 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.686428 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.741452 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.824601 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.859829 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 04 11:52:52 crc kubenswrapper[4797]: I0104 11:52:52.944302 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.003441 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.071289 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.073676 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.091238 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.270900 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 04 11:52:53 crc kubenswrapper[4797]: E0104 11:52:53.295131 4797 log.go:32] "RunPodSandbox from runtime service failed" err=<
Jan 04 11:52:53 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff" Netns:"/var/run/netns/e1bd1bef-57b5-4563-a597-e03ee7e3b4ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found
Jan 04 11:52:53 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 04 11:52:53 crc kubenswrapper[4797]: >
Jan 04 11:52:53 crc kubenswrapper[4797]: E0104 11:52:53.295189 4797 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Jan 04 11:52:53 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff" Netns:"/var/run/netns/e1bd1bef-57b5-4563-a597-e03ee7e3b4ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found
Jan 04 11:52:53 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 04 11:52:53 crc kubenswrapper[4797]: > pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:53 crc kubenswrapper[4797]: E0104 11:52:53.295207 4797 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=<
Jan 04 11:52:53 crc kubenswrapper[4797]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff" Netns:"/var/run/netns/e1bd1bef-57b5-4563-a597-e03ee7e3b4ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod "oauth-openshift-7794f9c69b-4dkd6" not found
Jan 04 11:52:53 crc kubenswrapper[4797]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 04 11:52:53 crc kubenswrapper[4797]: > pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:52:53 crc kubenswrapper[4797]: E0104 11:52:53.295287 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-7794f9c69b-4dkd6_openshift-authentication(c212d715-7c7e-4064-8e39-68dbc24ae19d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-7794f9c69b-4dkd6_openshift-authentication(c212d715-7c7e-4064-8e39-68dbc24ae19d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-7794f9c69b-4dkd6_openshift-authentication_c212d715-7c7e-4064-8e39-68dbc24ae19d_0(037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff): error adding pod openshift-authentication_oauth-openshift-7794f9c69b-4dkd6 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff\\\" Netns:\\\"/var/run/netns/e1bd1bef-57b5-4563-a597-e03ee7e3b4ad\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-7794f9c69b-4dkd6;K8S_POD_INFRA_CONTAINER_ID=037b7c594aa385dcef9f77589459f51bc0489633ccd61b89e6772e2ff3367fff;K8S_POD_UID=c212d715-7c7e-4064-8e39-68dbc24ae19d\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6] networking: Multus: [openshift-authentication/oauth-openshift-7794f9c69b-4dkd6/c212d715-7c7e-4064-8e39-68dbc24ae19d]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-7794f9c69b-4dkd6 in out of cluster comm: pod \\\"oauth-openshift-7794f9c69b-4dkd6\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" podUID="c212d715-7c7e-4064-8e39-68dbc24ae19d"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.300717 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.371075 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.406641 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.471178 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.484496 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.541746 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.620669 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.781199 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.818141 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.857237 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.907377 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 04 11:52:53 crc kubenswrapper[4797]: I0104 11:52:53.985252 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.121309 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.172462 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.175331 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.198919 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.222328 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.281254 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.294535 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.309029 4797 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.313031 4797 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.313402 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90" gracePeriod=5
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.404962 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.452943 4797 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.453807 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.505599 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.545316 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.566170 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.678691 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.696671 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.732927 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.733165 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.737540 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.746055 4797 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.755484 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.821485 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.860985 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.922377 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.930196 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.959601 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 04 11:52:54 crc kubenswrapper[4797]: I0104 11:52:54.962763 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.142169 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.243646 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.261454 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.278762 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.294566 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.424062 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.554651 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.572037 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.586038 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.752930 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 04 11:52:55 crc kubenswrapper[4797]: I0104 11:52:55.991767 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.016387 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.019786 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.167777 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.363881 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.541619 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.592747 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.617431 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.626057 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.708460 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.783794 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.809225 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.830535 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 04 11:52:56 crc kubenswrapper[4797]: I0104 11:52:56.969275 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.124569 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.188969 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.420578 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.454431 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.563943 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.635864 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.653520 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.800984 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.979458 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 04 11:52:57 crc kubenswrapper[4797]: I0104 11:52:57.991824 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.014141 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.171842 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.344071 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.393081 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.486258 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.687658 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.698613 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.726488 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.735504 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.739467 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.771069 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 04 11:52:58 crc kubenswrapper[4797]: I0104 11:52:58.938801 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.026281 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.056821 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.190363 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.221711 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.417373 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.418465 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.528171 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.691449 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.809822 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.919290 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 04 11:52:59 crc kubenswrapper[4797]: I0104 11:52:59.919384 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.016976 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.017104 4797 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90" exitCode=137
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.017163 4797 scope.go:117] "RemoveContainer" containerID="6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.017239 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.044433 4797 scope.go:117] "RemoveContainer" containerID="6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90"
Jan 04 11:53:00 crc kubenswrapper[4797]: E0104 11:53:00.045015 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90\": container with ID starting with 6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90 not found: ID does not exist" containerID="6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.045100 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90"} err="failed to get container status \"6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90\": rpc error: code = NotFound desc = could not find container \"6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90\": container with ID starting with 6993ad300a751ab43176a02fe0ebe5ec13926f9cafaff6b9a74ce55880c54a90 not found: ID does not exist"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.063839 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.114728 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.114805 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.114842 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.114933 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.114957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115249 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115303 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115347 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115388 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115545 4797 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115576 4797 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115604 4797 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.115629 4797 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.129221 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.216849 4797 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.450918 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 04 11:53:00 crc kubenswrapper[4797]: I0104 11:53:00.603982 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 04 11:53:01 crc kubenswrapper[4797]: I0104 11:53:01.290760 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 04 11:53:01 crc kubenswrapper[4797]: I0104 11:53:01.487459 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Jan 04 11:53:01 crc kubenswrapper[4797]: I0104 11:53:01.897689 4797 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 04 11:53:02 crc kubenswrapper[4797]: I0104 11:53:02.253071 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 04 11:53:02 crc kubenswrapper[4797]: I0104 11:53:02.827155 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 04 11:53:08 crc kubenswrapper[4797]: I0104 11:53:08.474089 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:53:08 crc kubenswrapper[4797]: I0104 11:53:08.474952 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:53:08 crc kubenswrapper[4797]: I0104 11:53:08.947358 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"]
Jan 04 11:53:09 crc kubenswrapper[4797]: I0104 11:53:09.080661 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" event={"ID":"c212d715-7c7e-4064-8e39-68dbc24ae19d","Type":"ContainerStarted","Data":"42df85b1c14fc1e56731ebca720d7fdab204fd6133531b72072d427279d048cc"}
Jan 04 11:53:10 crc kubenswrapper[4797]: I0104 11:53:10.092513 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" event={"ID":"c212d715-7c7e-4064-8e39-68dbc24ae19d","Type":"ContainerStarted","Data":"7b35551c7339c1055255e49e43cce036b39f0d1f3150b5e533bfdf23625bbfb9"}
Jan 04 11:53:10 crc kubenswrapper[4797]: I0104 11:53:10.092977 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:53:10 crc kubenswrapper[4797]: I0104 11:53:10.121831 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6" podStartSLOduration=82.121809252 podStartE2EDuration="1m22.121809252s" podCreationTimestamp="2026-01-04 11:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:10.120425744 +0000 UTC m=+288.977612483" watchObservedRunningTime="2026-01-04 11:53:10.121809252 +0000 UTC m=+288.978995971"
Jan 04 11:53:10 crc kubenswrapper[4797]: I0104 11:53:10.291737 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7794f9c69b-4dkd6"
Jan 04 11:53:21 crc kubenswrapper[4797]: I0104 11:53:21.310111 4797 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Jan 04 11:53:22 crc kubenswrapper[4797]: I0104 11:53:22.174028 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Jan 04 11:53:22 crc kubenswrapper[4797]: I0104 11:53:22.175704 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 04 11:53:22 crc kubenswrapper[4797]: I0104 11:53:22.175750 4797 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="557fa4c45b7dc26914a024ddbcff562299ac8089e8137b2cfe3f8675506f5dab" exitCode=137
Jan 04 11:53:22 crc kubenswrapper[4797]: I0104 11:53:22.175786 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"557fa4c45b7dc26914a024ddbcff562299ac8089e8137b2cfe3f8675506f5dab"}
Jan 04 11:53:22 crc kubenswrapper[4797]: I0104 11:53:22.175820 4797 scope.go:117] "RemoveContainer" containerID="0992467ed9c8b8a70dd4941348137dd1a68faf3163735f51630ed3ad363a669f"
Jan 04 11:53:23 crc kubenswrapper[4797]: I0104 11:53:23.184294 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Jan 04 11:53:23 crc kubenswrapper[4797]: I0104 11:53:23.186162 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5255ad2f498d6a5f9fc68d90190a09aa361679c84e1caf5006df3cd075962b55"}
Jan 04 11:53:25 crc kubenswrapper[4797]: I0104 11:53:25.241526 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:53:31 crc kubenswrapper[4797]: I0104 11:53:31.770400 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:53:31 crc kubenswrapper[4797]: I0104 11:53:31.777962 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:53:32 crc kubenswrapper[4797]: I0104 11:53:32.247893 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.512812 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"]
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.513582 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerName="controller-manager" containerID="cri-o://e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde" gracePeriod=30
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.517946 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"]
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.519179 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" podUID="56b77313-425b-4096-bfa8-6e9c7fffa300" containerName="route-controller-manager" containerID="cri-o://1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320" gracePeriod=30
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.867586 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6797fc497-lm766"
Jan 04 11:53:42 crc kubenswrapper[4797]: I0104 11:53:42.948572 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.017350 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dtpz\" (UniqueName: \"kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz\") pod \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.017430 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config\") pod \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.017455 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert\") pod \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.017507 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca\") pod \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.017549 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles\") pod \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\" (UID: \"182dae25-4f4a-4547-99a9-4c1bb50d31ff\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.018417 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "182dae25-4f4a-4547-99a9-4c1bb50d31ff" (UID: "182dae25-4f4a-4547-99a9-4c1bb50d31ff"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.019184 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca" (OuterVolumeSpecName: "client-ca") pod "182dae25-4f4a-4547-99a9-4c1bb50d31ff" (UID: "182dae25-4f4a-4547-99a9-4c1bb50d31ff"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.019417 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config" (OuterVolumeSpecName: "config") pod "182dae25-4f4a-4547-99a9-4c1bb50d31ff" (UID: "182dae25-4f4a-4547-99a9-4c1bb50d31ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.024317 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "182dae25-4f4a-4547-99a9-4c1bb50d31ff" (UID: "182dae25-4f4a-4547-99a9-4c1bb50d31ff"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.025140 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz" (OuterVolumeSpecName: "kube-api-access-2dtpz") pod "182dae25-4f4a-4547-99a9-4c1bb50d31ff" (UID: "182dae25-4f4a-4547-99a9-4c1bb50d31ff"). InnerVolumeSpecName "kube-api-access-2dtpz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.118404 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrn7n\" (UniqueName: \"kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n\") pod \"56b77313-425b-4096-bfa8-6e9c7fffa300\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.118460 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca\") pod \"56b77313-425b-4096-bfa8-6e9c7fffa300\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.118508 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert\") pod \"56b77313-425b-4096-bfa8-6e9c7fffa300\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.118581 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config\") pod \"56b77313-425b-4096-bfa8-6e9c7fffa300\" (UID: \"56b77313-425b-4096-bfa8-6e9c7fffa300\") "
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.119024 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.119050 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/182dae25-4f4a-4547-99a9-4c1bb50d31ff-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.119062 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-client-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.119097 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/182dae25-4f4a-4547-99a9-4c1bb50d31ff-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.119113 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dtpz\" (UniqueName: \"kubernetes.io/projected/182dae25-4f4a-4547-99a9-4c1bb50d31ff-kube-api-access-2dtpz\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.121611 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config" (OuterVolumeSpecName: "config") pod "56b77313-425b-4096-bfa8-6e9c7fffa300" (UID:
"56b77313-425b-4096-bfa8-6e9c7fffa300"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.121938 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca" (OuterVolumeSpecName: "client-ca") pod "56b77313-425b-4096-bfa8-6e9c7fffa300" (UID: "56b77313-425b-4096-bfa8-6e9c7fffa300"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.125477 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n" (OuterVolumeSpecName: "kube-api-access-hrn7n") pod "56b77313-425b-4096-bfa8-6e9c7fffa300" (UID: "56b77313-425b-4096-bfa8-6e9c7fffa300"). InnerVolumeSpecName "kube-api-access-hrn7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.125781 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "56b77313-425b-4096-bfa8-6e9c7fffa300" (UID: "56b77313-425b-4096-bfa8-6e9c7fffa300"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.219737 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56b77313-425b-4096-bfa8-6e9c7fffa300-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.219777 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.219789 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrn7n\" (UniqueName: \"kubernetes.io/projected/56b77313-425b-4096-bfa8-6e9c7fffa300-kube-api-access-hrn7n\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.219803 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/56b77313-425b-4096-bfa8-6e9c7fffa300-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.314421 4797 generic.go:334] "Generic (PLEG): container finished" podID="56b77313-425b-4096-bfa8-6e9c7fffa300" containerID="1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320" exitCode=0 Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.314508 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" event={"ID":"56b77313-425b-4096-bfa8-6e9c7fffa300","Type":"ContainerDied","Data":"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320"} Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.314558 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.314580 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk" event={"ID":"56b77313-425b-4096-bfa8-6e9c7fffa300","Type":"ContainerDied","Data":"631dac42ec3ef117000821fa4d1efc7ca96980bb08938975d3003fd912fe90a0"} Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.314603 4797 scope.go:117] "RemoveContainer" containerID="1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.316375 4797 generic.go:334] "Generic (PLEG): container finished" podID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerID="e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde" exitCode=0 Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.316400 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.316410 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" event={"ID":"182dae25-4f4a-4547-99a9-4c1bb50d31ff","Type":"ContainerDied","Data":"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde"} Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.316447 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6797fc497-lm766" event={"ID":"182dae25-4f4a-4547-99a9-4c1bb50d31ff","Type":"ContainerDied","Data":"987656d4c1b9851cebbd373c6c0b9f6b1cd921a1edf8296d5104ccb43ed6fe62"} Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.336310 4797 scope.go:117] "RemoveContainer" containerID="1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320" Jan 04 11:53:43 crc kubenswrapper[4797]: E0104 11:53:43.336870 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320\": container with ID starting with 1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320 not found: ID does not exist" containerID="1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.336906 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320"} err="failed to get container status \"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320\": rpc error: code = NotFound desc = could not find container \"1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320\": container with ID starting with 1d2cc2ac722ea30e0d37daf003ab080eddbbd7e0e837b1496e22e72d6263d320 not found: ID does not exist" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.336932 4797 scope.go:117] "RemoveContainer" containerID="e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.350540 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.353676 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-controller-manager/controller-manager-6797fc497-lm766"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.354351 4797 scope.go:117] "RemoveContainer" containerID="e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde" Jan 04 11:53:43 crc kubenswrapper[4797]: E0104 11:53:43.354734 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde\": container with ID starting with e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde not found: ID does not exist" containerID="e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.354766 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde"} err="failed to get container status \"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde\": rpc error: code = NotFound desc = could not find container \"e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde\": container with ID starting with e2863fbb43187708c8b3ee928996471978580826b1428ff133ba4b39d307bdde not found: ID does not exist" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.357381 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.361076 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77456d887c-n8frk"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.483017 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" path="/var/lib/kubelet/pods/182dae25-4f4a-4547-99a9-4c1bb50d31ff/volumes" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.484118 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56b77313-425b-4096-bfa8-6e9c7fffa300" path="/var/lib/kubelet/pods/56b77313-425b-4096-bfa8-6e9c7fffa300/volumes" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.944820 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"] Jan 04 11:53:43 crc kubenswrapper[4797]: E0104 11:53:43.945546 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945569 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:53:43 crc kubenswrapper[4797]: E0104 11:53:43.945596 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56b77313-425b-4096-bfa8-6e9c7fffa300" containerName="route-controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945608 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="56b77313-425b-4096-bfa8-6e9c7fffa300" containerName="route-controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: E0104 11:53:43.945620 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerName="controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945632 4797 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerName="controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945823 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="182dae25-4f4a-4547-99a9-4c1bb50d31ff" containerName="controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945856 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="56b77313-425b-4096-bfa8-6e9c7fffa300" containerName="route-controller-manager" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.945874 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.946490 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.948356 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.948439 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.948568 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.949098 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.949238 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.951536 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.952735 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.954117 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.955748 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.956370 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.957571 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.957600 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.957643 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.961095 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"] Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.963453 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.968631 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:53:43 crc kubenswrapper[4797]: I0104 11:53:43.997854 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"] Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132449 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132552 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlgsl\" (UniqueName: \"kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132595 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132631 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhktk\" (UniqueName: 
\"kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132680 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.132853 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.133028 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.133092 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.133190 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234475 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234567 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234652 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234714 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlgsl\" (UniqueName: \"kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234751 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234784 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhktk\" (UniqueName: \"kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234837 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234874 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.234917 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.235806 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.236544 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") 
" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.237161 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.237402 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.237896 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.239504 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.243294 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.255204 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhktk\" (UniqueName: \"kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk\") pod \"route-controller-manager-5cb4468b7-rhlzh\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.262563 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlgsl\" (UniqueName: \"kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl\") pod \"controller-manager-6bf96b59c7-czczf\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.275391 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.290408 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.548894 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"] Jan 04 11:53:44 crc kubenswrapper[4797]: I0104 11:53:44.703136 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"] Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.331751 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" event={"ID":"94a574a7-2a85-46db-b551-f422ff5e1f3a","Type":"ContainerStarted","Data":"ac5dde255788677ca78507e9286c4cf1cca358d6f00fec1b29aa681cca54f77b"} Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.331981 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" event={"ID":"94a574a7-2a85-46db-b551-f422ff5e1f3a","Type":"ContainerStarted","Data":"1ba918ee69fe85596268fb6b73427bbc40564942f72cd14c95814ff276b7c530"} Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.332015 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.333305 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" event={"ID":"52cf0c81-6566-49ea-b7fe-1773d9a1f325","Type":"ContainerStarted","Data":"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044"} Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.333364 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" event={"ID":"52cf0c81-6566-49ea-b7fe-1773d9a1f325","Type":"ContainerStarted","Data":"c6b2e7607c8b04cb8a54114af92222c2a7b37a18d6d6de8fec00ce8b5e0aa346"} Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.334030 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.337319 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.338107 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.350869 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" podStartSLOduration=3.350843474 podStartE2EDuration="3.350843474s" podCreationTimestamp="2026-01-04 11:53:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:45.345837706 +0000 UTC m=+324.203024435" watchObservedRunningTime="2026-01-04 11:53:45.350843474 +0000 UTC m=+324.208030203" Jan 04 11:53:45 crc kubenswrapper[4797]: I0104 11:53:45.366238 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" 
podStartSLOduration=3.366220067 podStartE2EDuration="3.366220067s" podCreationTimestamp="2026-01-04 11:53:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:45.365469997 +0000 UTC m=+324.222656706" watchObservedRunningTime="2026-01-04 11:53:45.366220067 +0000 UTC m=+324.223406796" Jan 04 11:53:49 crc kubenswrapper[4797]: I0104 11:53:49.494294 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:53:49 crc kubenswrapper[4797]: I0104 11:53:49.494776 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:54:19 crc kubenswrapper[4797]: I0104 11:54:19.494349 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:54:19 crc kubenswrapper[4797]: I0104 11:54:19.495438 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.126429 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jqh8d"] Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.131066 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.139865 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jqh8d"] Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308600 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308847 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-bound-sa-token\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308877 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-certificates\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308898 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-tls\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308946 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-trusted-ca\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.308966 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a03e3423-dbb4-4ba0-b022-5e68bb184f82-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.309009 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a03e3423-dbb4-4ba0-b022-5e68bb184f82-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.309154 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr8sx\" (UniqueName: 
\"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-kube-api-access-zr8sx\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.331313 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.410528 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-trusted-ca\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.410612 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a03e3423-dbb4-4ba0-b022-5e68bb184f82-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.410731 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a03e3423-dbb4-4ba0-b022-5e68bb184f82-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.410831 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr8sx\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-kube-api-access-zr8sx\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.410917 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-bound-sa-token\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.411017 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-certificates\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.411099 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-tls\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.411379 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a03e3423-dbb4-4ba0-b022-5e68bb184f82-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.412430 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-certificates\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.412529 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a03e3423-dbb4-4ba0-b022-5e68bb184f82-trusted-ca\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.417374 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-registry-tls\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.418923 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a03e3423-dbb4-4ba0-b022-5e68bb184f82-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.435429 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-bound-sa-token\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.442304 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr8sx\" (UniqueName: \"kubernetes.io/projected/a03e3423-dbb4-4ba0-b022-5e68bb184f82-kube-api-access-zr8sx\") pod \"image-registry-66df7c8f76-jqh8d\" (UID: \"a03e3423-dbb4-4ba0-b022-5e68bb184f82\") " pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.449443 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:33 crc kubenswrapper[4797]: I0104 11:54:33.936875 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jqh8d"] Jan 04 11:54:34 crc kubenswrapper[4797]: I0104 11:54:34.644120 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" event={"ID":"a03e3423-dbb4-4ba0-b022-5e68bb184f82","Type":"ContainerStarted","Data":"25a90b6cac132769049e61a3a589c7e832cd3a6a1f39efd0391ab13dea917cde"} Jan 04 11:54:34 crc kubenswrapper[4797]: I0104 11:54:34.645672 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" Jan 04 11:54:34 crc kubenswrapper[4797]: I0104 11:54:34.646317 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" event={"ID":"a03e3423-dbb4-4ba0-b022-5e68bb184f82","Type":"ContainerStarted","Data":"9571b52617fa690bef12e85d1242e4f7c640cf29b341eb21888855080ebe662d"} Jan 04 11:54:34 crc kubenswrapper[4797]: I0104 11:54:34.664747 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d" podStartSLOduration=1.664726953 podStartE2EDuration="1.664726953s" podCreationTimestamp="2026-01-04 11:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:54:34.662080818 +0000 UTC m=+373.519267527" watchObservedRunningTime="2026-01-04 11:54:34.664726953 +0000 UTC m=+373.521913662" Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.918875 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"] Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.920031 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2wl2p" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="registry-server" containerID="cri-o://fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661" gracePeriod=30 Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.929323 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lrkgj"] Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.929533 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lrkgj" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="registry-server" containerID="cri-o://298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4" gracePeriod=30 Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.958888 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"] Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.959231 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerName="marketplace-operator" containerID="cri-o://c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087" gracePeriod=30 Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.963543 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"] Jan 
04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.963933 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m2j8n" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="registry-server" containerID="cri-o://b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe" gracePeriod=30
Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.965107 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smn67"]
Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.965516 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-smn67" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="registry-server" containerID="cri-o://8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320" gracePeriod=30
Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.967870 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-29fg5"]
Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.968522 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:38 crc kubenswrapper[4797]: I0104 11:54:38.987376 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-29fg5"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.101963 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.102059 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhjmh\" (UniqueName: \"kubernetes.io/projected/8e644b6e-1006-4761-a4e9-b0af15833725-kube-api-access-jhjmh\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.102110 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.203682 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.203923 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhjmh\" (UniqueName: \"kubernetes.io/projected/8e644b6e-1006-4761-a4e9-b0af15833725-kube-api-access-jhjmh\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.203956 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.205123 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.209766 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8e644b6e-1006-4761-a4e9-b0af15833725-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.225189 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhjmh\" (UniqueName: \"kubernetes.io/projected/8e644b6e-1006-4761-a4e9-b0af15833725-kube-api-access-jhjmh\") pod \"marketplace-operator-79b997595-29fg5\" (UID: \"8e644b6e-1006-4761-a4e9-b0af15833725\") " pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.293871 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.365606 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.400898 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.406773 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.427317 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smn67"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.505491 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2j8n"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517515 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content\") pod \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517566 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content\") pod \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517596 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89w2c\" (UniqueName: \"kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c\") pod \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517623 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities\") pod \"f97f7bbd-7702-4344-b235-056f577f6b55\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517672 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87cfg\" (UniqueName: \"kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg\") pod \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517712 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities\") pod \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517735 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnht7\" (UniqueName: \"kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7\") pod \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\" (UID: \"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517761 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca\") pod \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517795 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics\") pod \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\" (UID: \"7dcb99cf-0f3c-4288-bcee-937ef73461ce\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517820 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2b59\" (UniqueName: \"kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59\") pod \"f97f7bbd-7702-4344-b235-056f577f6b55\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517873 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities\") pod \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\" (UID: \"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.517911 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content\") pod \"f97f7bbd-7702-4344-b235-056f577f6b55\" (UID: \"f97f7bbd-7702-4344-b235-056f577f6b55\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.519904 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities" (OuterVolumeSpecName: "utilities") pod "f97f7bbd-7702-4344-b235-056f577f6b55" (UID: "f97f7bbd-7702-4344-b235-056f577f6b55"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.519951 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities" (OuterVolumeSpecName: "utilities") pod "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" (UID: "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.520168 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7dcb99cf-0f3c-4288-bcee-937ef73461ce" (UID: "7dcb99cf-0f3c-4288-bcee-937ef73461ce"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.520377 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities" (OuterVolumeSpecName: "utilities") pod "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" (UID: "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.522342 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7" (OuterVolumeSpecName: "kube-api-access-vnht7") pod "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" (UID: "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b"). InnerVolumeSpecName "kube-api-access-vnht7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.526724 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg" (OuterVolumeSpecName: "kube-api-access-87cfg") pod "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" (UID: "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5"). InnerVolumeSpecName "kube-api-access-87cfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.529999 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59" (OuterVolumeSpecName: "kube-api-access-h2b59") pod "f97f7bbd-7702-4344-b235-056f577f6b55" (UID: "f97f7bbd-7702-4344-b235-056f577f6b55"). InnerVolumeSpecName "kube-api-access-h2b59". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.544347 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7dcb99cf-0f3c-4288-bcee-937ef73461ce" (UID: "7dcb99cf-0f3c-4288-bcee-937ef73461ce"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.544367 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c" (OuterVolumeSpecName: "kube-api-access-89w2c") pod "7dcb99cf-0f3c-4288-bcee-937ef73461ce" (UID: "7dcb99cf-0f3c-4288-bcee-937ef73461ce"). InnerVolumeSpecName "kube-api-access-89w2c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.572067 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" (UID: "07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.581004 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" (UID: "97d1cbe8-6a84-4cc0-a4af-0f66635aa60b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.618843 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content\") pod \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.618947 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities\") pod \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619050 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j44zz\" (UniqueName: \"kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz\") pod \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\" (UID: \"03c91ee7-6f6e-4c7c-8501-dd36e81e5421\") "
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619265 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619277 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619291 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619299 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89w2c\" (UniqueName: \"kubernetes.io/projected/7dcb99cf-0f3c-4288-bcee-937ef73461ce-kube-api-access-89w2c\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619308 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619316 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87cfg\" (UniqueName: \"kubernetes.io/projected/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5-kube-api-access-87cfg\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619324 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619334 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnht7\" (UniqueName: \"kubernetes.io/projected/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b-kube-api-access-vnht7\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619342 4797 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619350 4797 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7dcb99cf-0f3c-4288-bcee-937ef73461ce-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619362 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2b59\" (UniqueName: \"kubernetes.io/projected/f97f7bbd-7702-4344-b235-056f577f6b55-kube-api-access-h2b59\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.619706 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities" (OuterVolumeSpecName: "utilities") pod "03c91ee7-6f6e-4c7c-8501-dd36e81e5421" (UID: "03c91ee7-6f6e-4c7c-8501-dd36e81e5421"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.622544 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz" (OuterVolumeSpecName: "kube-api-access-j44zz") pod "03c91ee7-6f6e-4c7c-8501-dd36e81e5421" (UID: "03c91ee7-6f6e-4c7c-8501-dd36e81e5421"). InnerVolumeSpecName "kube-api-access-j44zz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.647852 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03c91ee7-6f6e-4c7c-8501-dd36e81e5421" (UID: "03c91ee7-6f6e-4c7c-8501-dd36e81e5421"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.664486 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f97f7bbd-7702-4344-b235-056f577f6b55" (UID: "f97f7bbd-7702-4344-b235-056f577f6b55"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.673288 4797 generic.go:334] "Generic (PLEG): container finished" podID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerID="b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe" exitCode=0
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.673341 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerDied","Data":"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.673363 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2j8n"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.673659 4797 scope.go:117] "RemoveContainer" containerID="b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.673578 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2j8n" event={"ID":"03c91ee7-6f6e-4c7c-8501-dd36e81e5421","Type":"ContainerDied","Data":"40550f23c70f29925f7769ad6f70b4d63b1d68374dc3c74d862bbdfefbc01ff8"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.675170 4797 generic.go:334] "Generic (PLEG): container finished" podID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerID="c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087" exitCode=0
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.675223 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" event={"ID":"7dcb99cf-0f3c-4288-bcee-937ef73461ce","Type":"ContainerDied","Data":"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.675229 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.675239 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q5lt7" event={"ID":"7dcb99cf-0f3c-4288-bcee-937ef73461ce","Type":"ContainerDied","Data":"2f685321bef4a9a003a7eea31fb749d1b4a23140fc830f3f1233a8709cb079f1"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.678298 4797 generic.go:334] "Generic (PLEG): container finished" podID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerID="fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661" exitCode=0
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.678360 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wl2p"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.678361 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerDied","Data":"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.678428 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wl2p" event={"ID":"07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5","Type":"ContainerDied","Data":"9f9e5967fda4d11f0eacc98ae13680d098a8ee8c845a6f00fe3e9a057f1eb855"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.681775 4797 generic.go:334] "Generic (PLEG): container finished" podID="f97f7bbd-7702-4344-b235-056f577f6b55" containerID="8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320" exitCode=0
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.681839 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerDied","Data":"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.681868 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smn67" event={"ID":"f97f7bbd-7702-4344-b235-056f577f6b55","Type":"ContainerDied","Data":"25397d8a3d0e7935f3b94173aeb0d1f5601d85ee8fcaf18a5413329c347ec3de"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.681912 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smn67"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.687584 4797 generic.go:334] "Generic (PLEG): container finished" podID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerID="298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4" exitCode=0
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.687637 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerDied","Data":"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.687659 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lrkgj" event={"ID":"97d1cbe8-6a84-4cc0-a4af-0f66635aa60b","Type":"ContainerDied","Data":"a6fc021a92a99fb3b550c1452543e06b0d434478d7dcba32c509d4a6832caa7c"}
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.687663 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lrkgj"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.691093 4797 scope.go:117] "RemoveContainer" containerID="25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.720650 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.720682 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f7bbd-7702-4344-b235-056f577f6b55-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.720695 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.720709 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j44zz\" (UniqueName: \"kubernetes.io/projected/03c91ee7-6f6e-4c7c-8501-dd36e81e5421-kube-api-access-j44zz\") on node \"crc\" DevicePath \"\""
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.723493 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.730186 4797 scope.go:117] "RemoveContainer" containerID="33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.731849 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2wl2p"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.738763 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.747242 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q5lt7"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.752094 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.753977 4797 scope.go:117] "RemoveContainer" containerID="b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.754749 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe\": container with ID starting with b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe not found: ID does not exist" containerID="b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.754855 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe"} err="failed to get container status \"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe\": rpc error: code = NotFound desc = could not find container \"b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe\": container with ID starting with b81cc30628cfdd59f5d520ff749dabe1d4ce43ffbf0422d020e7d27eba8f61fe not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.754901 4797 scope.go:117] "RemoveContainer" containerID="25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.755239 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74\": container with ID starting with 25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74 not found: ID does not exist" containerID="25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.755358 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74"} err="failed to get container status \"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74\": rpc error: code = NotFound desc = could not find container \"25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74\": container with ID starting with 25002f92abe73ba9d04527cdb4a254de02be17ca396347717d413d3df4455f74 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.755490 4797 scope.go:117] "RemoveContainer" containerID="33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.756091 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2j8n"]
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.757153 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e\": container with ID starting with 33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e not found: ID does not exist" containerID="33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.757254 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e"} err="failed to get container status \"33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e\": rpc error: code = NotFound desc = could not find container \"33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e\": container with ID starting with 33f26dd4781a47930b7d54f9e7e3615b9715a7b2bbbba8afc2fc514f8f892f6e not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.757379 4797 scope.go:117] "RemoveContainer" containerID="c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.760938 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smn67"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.765553 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-smn67"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.770656 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lrkgj"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.780500 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lrkgj"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.785473 4797 scope.go:117] "RemoveContainer" containerID="c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.785934 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087\": container with ID starting with c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087 not found: ID does not exist" containerID="c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.785963 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087"} err="failed to get container status \"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087\": rpc error: code = NotFound desc = could not find container \"c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087\": container with ID starting with c7c9b525b0a20c3739525a5e2eee1a6afa27c264eaf1b06954baa1bc15a7f087 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.786000 4797 scope.go:117] "RemoveContainer" containerID="fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.802778 4797 scope.go:117] "RemoveContainer" containerID="bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.810785 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-29fg5"]
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.828812 4797 scope.go:117] "RemoveContainer" containerID="188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.859581 4797 scope.go:117] "RemoveContainer" containerID="fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.859910 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661\": container with ID starting with fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661 not found: ID does not exist" containerID="fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.859945 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661"} err="failed to get container status \"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661\": rpc error: code = NotFound desc = could not find container \"fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661\": container with ID starting with fb11cf046fd832c0d9f179bca3d18502f57cfebf2593495f9b4767ca09d79661 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.859971 4797 scope.go:117] "RemoveContainer" containerID="bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.860663 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88\": container with ID starting with bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88 not found: ID does not exist" containerID="bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.860692 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88"} err="failed to get container status \"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88\": rpc error: code = NotFound desc = could not find container \"bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88\": container with ID starting with bb2e2829875267321396d89bc56cea089c5b99bcc381a4058ca583729fc9ac88 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.860727 4797 scope.go:117] "RemoveContainer" containerID="188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.860977 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe\": container with ID starting with 188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe not found: ID does not exist" containerID="188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.861044 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe"} err="failed to get container status \"188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe\": rpc error: code = NotFound desc = could not find container \"188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe\": container with ID starting with 188e043a634da46b78926871d2013504385af04af06dc2c4fa516a82aefc7cfe not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.861074 4797 scope.go:117] "RemoveContainer" containerID="8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.875795 4797 scope.go:117] "RemoveContainer" containerID="5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.892231 4797 scope.go:117] "RemoveContainer" containerID="ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.920881 4797 scope.go:117] "RemoveContainer" containerID="8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.921363 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320\": container with ID starting with 8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320 not found: ID does not exist" containerID="8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.921410 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320"} err="failed to get container status \"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320\": rpc error: code = NotFound desc = could not find container \"8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320\": container with ID starting with 8e8d67492152f3ad2ae391ff061492372d1c505fb6f2b88feb834d64ea1ca320 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.921442 4797 scope.go:117] "RemoveContainer" containerID="5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.922513 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837\": container with ID starting with 5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837 not found: ID does not exist" containerID="5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.922538 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837"} err="failed to get container status \"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837\": rpc error: code = NotFound desc = could not find container \"5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837\": container with ID starting with 5a32b6eb0594780d5a46a218bc9886c6ecb8b109896dcce5944994519f009837 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.922553 4797 scope.go:117] "RemoveContainer" containerID="ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9"
Jan 04 11:54:39 crc kubenswrapper[4797]: E0104 11:54:39.922876 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9\": container with ID starting with ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9 not found: ID does not exist" containerID="ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.922909 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9"} err="failed to get container status \"ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9\": rpc error: code = NotFound desc = could not find container \"ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9\": container with ID starting with ee07eb967207d442c8c64ade3cc29dd06267f97434adbaa16a2b0ef1e002eab9 not found: ID does not exist"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.922938 4797 scope.go:117] "RemoveContainer" containerID="298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.950160 4797 scope.go:117] "RemoveContainer" containerID="2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"
Jan 04 11:54:39 crc kubenswrapper[4797]: I0104 11:54:39.974349 4797 scope.go:117] "RemoveContainer" containerID="e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.005275 4797 scope.go:117] "RemoveContainer" containerID="298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.009255 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4\": container with ID starting with 298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4 not found: ID does not exist" containerID="298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.009314 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4"} err="failed to get container status \"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4\": rpc error: code = NotFound desc = could not find container \"298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4\": container with ID starting with 298597dfb5a0c934a79a0c8c058939dc3cd2b92726b20826b50ccd81d0f29ac4 not found: ID does not exist"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.009364 4797 scope.go:117] "RemoveContainer" containerID="2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.009864 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503\": container with ID starting with 2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503 not found: ID does not exist" containerID="2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.009892 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503"} err="failed to get container status \"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503\": rpc error: code = NotFound desc = could not find container \"2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503\": container with ID starting with 2915a99f31ab47b1329914372cf223dd5229ee36a46f2e76d4c4bb3cac7c7503 not found: ID does not exist"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.009928 4797 scope.go:117] "RemoveContainer" containerID="e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.010327 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a\": container with ID starting with e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a not found: ID does not exist" containerID="e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.010374 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a"} err="failed to get container status \"e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a\": rpc error: code = NotFound desc = could not find container \"e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a\": container with ID starting with e16359aea03b0966b0e3cc81b2754ec75c42f03b198aaa3bd0bc471a0e29f85a not found: ID does not exist"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405455 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7pzcc"]
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.405904 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405916 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.405926 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405932 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.405942 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405949 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.405956 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405961 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.405970 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.405976 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406000 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406006 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406017 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406022 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406031 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406036 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406044 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerName="marketplace-operator"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406050 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerName="marketplace-operator"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406058 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406064 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406075 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406082 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406091 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406096 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="extract-content"
Jan 04 11:54:40 crc kubenswrapper[4797]: E0104 11:54:40.406103 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406108 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="extract-utilities"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406193 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406206 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406214 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406223 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" containerName="registry-server"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.406231 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" containerName="marketplace-operator"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.407010 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.412154 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.424313 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7pzcc"]
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.530683 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-utilities\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.530733 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skktd\" (UniqueName: \"kubernetes.io/projected/48f385bf-de22-4bcc-9f95-ad5fc822c631-kube-api-access-skktd\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.530766 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-catalog-content\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.631777 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-utilities\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.631831 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skktd\" (UniqueName: \"kubernetes.io/projected/48f385bf-de22-4bcc-9f95-ad5fc822c631-kube-api-access-skktd\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.632574 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-utilities\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.632619 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-catalog-content\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.632634 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f385bf-de22-4bcc-9f95-ad5fc822c631-catalog-content\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.657240 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skktd\" (UniqueName: \"kubernetes.io/projected/48f385bf-de22-4bcc-9f95-ad5fc822c631-kube-api-access-skktd\") pod \"redhat-marketplace-7pzcc\" (UID: \"48f385bf-de22-4bcc-9f95-ad5fc822c631\") " pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.704560 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5" event={"ID":"8e644b6e-1006-4761-a4e9-b0af15833725","Type":"ContainerStarted","Data":"f416954af7277e3da53986e0e52036304fd469fc3dc201d37a7a77ec35c3fa00"}
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.704598 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5" event={"ID":"8e644b6e-1006-4761-a4e9-b0af15833725","Type":"ContainerStarted","Data":"a7db7b0411483fd0927338d59410865591cc7de848f2fb0b94cd8cb8e015d44e"}
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.705302 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.709103 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.719862 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:40 crc kubenswrapper[4797]: I0104 11:54:40.724944 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-29fg5" podStartSLOduration=2.724924405 podStartE2EDuration="2.724924405s" podCreationTimestamp="2026-01-04 11:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:54:40.720025176 +0000 UTC m=+379.577211895" watchObservedRunningTime="2026-01-04 11:54:40.724924405 +0000 UTC m=+379.582111124"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.015786 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zt2ck"]
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.017403 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.019431 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.024374 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zt2ck"]
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.113882 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7pzcc"]
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.156234 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-catalog-content\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.156295 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-utilities\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.156321 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt9cg\" (UniqueName: \"kubernetes.io/projected/8cd0fba6-5837-4843-a86c-9443222d1961-kube-api-access-pt9cg\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.257951 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-catalog-content\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.258094 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-utilities\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.258145 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt9cg\" (UniqueName: \"kubernetes.io/projected/8cd0fba6-5837-4843-a86c-9443222d1961-kube-api-access-pt9cg\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.258484 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-catalog-content\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.258534 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd0fba6-5837-4843-a86c-9443222d1961-utilities\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.288800 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt9cg\" (UniqueName: \"kubernetes.io/projected/8cd0fba6-5837-4843-a86c-9443222d1961-kube-api-access-pt9cg\") pod \"redhat-operators-zt2ck\" (UID: \"8cd0fba6-5837-4843-a86c-9443222d1961\") " pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.348254 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.485325 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c91ee7-6f6e-4c7c-8501-dd36e81e5421" path="/var/lib/kubelet/pods/03c91ee7-6f6e-4c7c-8501-dd36e81e5421/volumes"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.489038 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5" path="/var/lib/kubelet/pods/07ab4232-cce0-4cc1-8d46-6fe5d9c78ca5/volumes"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.489766 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dcb99cf-0f3c-4288-bcee-937ef73461ce" path="/var/lib/kubelet/pods/7dcb99cf-0f3c-4288-bcee-937ef73461ce/volumes"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.491299 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97d1cbe8-6a84-4cc0-a4af-0f66635aa60b" path="/var/lib/kubelet/pods/97d1cbe8-6a84-4cc0-a4af-0f66635aa60b/volumes"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.492084 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f97f7bbd-7702-4344-b235-056f577f6b55" path="/var/lib/kubelet/pods/f97f7bbd-7702-4344-b235-056f577f6b55/volumes"
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.582363 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zt2ck"]
Jan 04 11:54:41 crc kubenswrapper[4797]: W0104 11:54:41.590181 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cd0fba6_5837_4843_a86c_9443222d1961.slice/crio-ad99eed8a3e021904bda8850e49da4230072c0f5f5b014da2d299fc3a38cf9c6 WatchSource:0}: Error finding container ad99eed8a3e021904bda8850e49da4230072c0f5f5b014da2d299fc3a38cf9c6: Status 404 returned error can't find the container with id ad99eed8a3e021904bda8850e49da4230072c0f5f5b014da2d299fc3a38cf9c6
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.709758 4797 generic.go:334] "Generic (PLEG): container finished" podID="48f385bf-de22-4bcc-9f95-ad5fc822c631" containerID="ef5451a0e07f845629686fc07a7364e1e79adcec407351348f1f869fe6d0e8de" exitCode=0
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.709811 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7pzcc" event={"ID":"48f385bf-de22-4bcc-9f95-ad5fc822c631","Type":"ContainerDied","Data":"ef5451a0e07f845629686fc07a7364e1e79adcec407351348f1f869fe6d0e8de"}
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.709868 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7pzcc" event={"ID":"48f385bf-de22-4bcc-9f95-ad5fc822c631","Type":"ContainerStarted","Data":"a7145505d797b402bab49cbd119a081d2f89363ab605350e054799363c8d2f64"}
Jan 04 11:54:41 crc kubenswrapper[4797]: I0104 11:54:41.710838 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zt2ck" event={"ID":"8cd0fba6-5837-4843-a86c-9443222d1961","Type":"ContainerStarted","Data":"ad99eed8a3e021904bda8850e49da4230072c0f5f5b014da2d299fc3a38cf9c6"}
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.588440 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"]
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.589317 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" podUID="94a574a7-2a85-46db-b551-f422ff5e1f3a" containerName="controller-manager" containerID="cri-o://ac5dde255788677ca78507e9286c4cf1cca358d6f00fec1b29aa681cca54f77b" gracePeriod=30
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.605123 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"]
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.605605 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" podUID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" containerName="route-controller-manager" containerID="cri-o://50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044" gracePeriod=30
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.723510 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7pzcc" event={"ID":"48f385bf-de22-4bcc-9f95-ad5fc822c631","Type":"ContainerStarted","Data":"14807fe00744ddec4dabc60892e4021b91fa26b52ffca9b7448012809496d641"}
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.727778 4797 generic.go:334] "Generic (PLEG): container finished" podID="8cd0fba6-5837-4843-a86c-9443222d1961" containerID="8d11a20b7fb21f11f59d379a0624a58188b6b357e8984505401b29ee6b76c1dc" exitCode=0
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.727838 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zt2ck" event={"ID":"8cd0fba6-5837-4843-a86c-9443222d1961","Type":"ContainerDied","Data":"8d11a20b7fb21f11f59d379a0624a58188b6b357e8984505401b29ee6b76c1dc"}
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.731531 4797 generic.go:334] "Generic (PLEG): container finished" podID="94a574a7-2a85-46db-b551-f422ff5e1f3a" containerID="ac5dde255788677ca78507e9286c4cf1cca358d6f00fec1b29aa681cca54f77b" exitCode=0
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.731610 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" event={"ID":"94a574a7-2a85-46db-b551-f422ff5e1f3a","Type":"ContainerDied","Data":"ac5dde255788677ca78507e9286c4cf1cca358d6f00fec1b29aa681cca54f77b"}
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.802976 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s6f2t"]
Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.803844 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.806552 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.820364 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6f2t"] Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.981515 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-catalog-content\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.981868 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-utilities\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.981902 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2xfv\" (UniqueName: \"kubernetes.io/projected/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-kube-api-access-k2xfv\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.985288 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:54:42 crc kubenswrapper[4797]: I0104 11:54:42.993438 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.082433 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-catalog-content\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.082506 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-utilities\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.082547 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2xfv\" (UniqueName: \"kubernetes.io/projected/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-kube-api-access-k2xfv\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.083033 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-catalog-content\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.083207 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-utilities\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.104623 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2xfv\" (UniqueName: \"kubernetes.io/projected/e1ff8919-8dda-4f12-84bf-78f0014b5ec5-kube-api-access-k2xfv\") pod \"certified-operators-s6f2t\" (UID: \"e1ff8919-8dda-4f12-84bf-78f0014b5ec5\") " pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.132184 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6f2t" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183356 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert\") pod \"94a574a7-2a85-46db-b551-f422ff5e1f3a\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config\") pod \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183442 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca\") pod \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183465 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config\") pod \"94a574a7-2a85-46db-b551-f422ff5e1f3a\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183486 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert\") pod \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183519 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca\") pod \"94a574a7-2a85-46db-b551-f422ff5e1f3a\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183542 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhktk\" (UniqueName: \"kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk\") pod \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\" (UID: \"52cf0c81-6566-49ea-b7fe-1773d9a1f325\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183579 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlgsl\" (UniqueName: \"kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl\") pod \"94a574a7-2a85-46db-b551-f422ff5e1f3a\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.183607 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles\") pod \"94a574a7-2a85-46db-b551-f422ff5e1f3a\" (UID: \"94a574a7-2a85-46db-b551-f422ff5e1f3a\") " Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.184444 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca" (OuterVolumeSpecName: "client-ca") pod "94a574a7-2a85-46db-b551-f422ff5e1f3a" (UID: 
"94a574a7-2a85-46db-b551-f422ff5e1f3a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.184622 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config" (OuterVolumeSpecName: "config") pod "94a574a7-2a85-46db-b551-f422ff5e1f3a" (UID: "94a574a7-2a85-46db-b551-f422ff5e1f3a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.184703 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca" (OuterVolumeSpecName: "client-ca") pod "52cf0c81-6566-49ea-b7fe-1773d9a1f325" (UID: "52cf0c81-6566-49ea-b7fe-1773d9a1f325"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.184809 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config" (OuterVolumeSpecName: "config") pod "52cf0c81-6566-49ea-b7fe-1773d9a1f325" (UID: "52cf0c81-6566-49ea-b7fe-1773d9a1f325"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.185069 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "94a574a7-2a85-46db-b551-f422ff5e1f3a" (UID: "94a574a7-2a85-46db-b551-f422ff5e1f3a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.186652 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "52cf0c81-6566-49ea-b7fe-1773d9a1f325" (UID: "52cf0c81-6566-49ea-b7fe-1773d9a1f325"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.187363 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl" (OuterVolumeSpecName: "kube-api-access-dlgsl") pod "94a574a7-2a85-46db-b551-f422ff5e1f3a" (UID: "94a574a7-2a85-46db-b551-f422ff5e1f3a"). InnerVolumeSpecName "kube-api-access-dlgsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.188641 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk" (OuterVolumeSpecName: "kube-api-access-lhktk") pod "52cf0c81-6566-49ea-b7fe-1773d9a1f325" (UID: "52cf0c81-6566-49ea-b7fe-1773d9a1f325"). InnerVolumeSpecName "kube-api-access-lhktk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.191726 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "94a574a7-2a85-46db-b551-f422ff5e1f3a" (UID: "94a574a7-2a85-46db-b551-f422ff5e1f3a"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284478 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlgsl\" (UniqueName: \"kubernetes.io/projected/94a574a7-2a85-46db-b551-f422ff5e1f3a-kube-api-access-dlgsl\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284522 4797 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284535 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a574a7-2a85-46db-b551-f422ff5e1f3a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284547 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284557 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52cf0c81-6566-49ea-b7fe-1773d9a1f325-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284564 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284572 4797 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52cf0c81-6566-49ea-b7fe-1773d9a1f325-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284583 4797 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94a574a7-2a85-46db-b551-f422ff5e1f3a-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.284593 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhktk\" (UniqueName: \"kubernetes.io/projected/52cf0c81-6566-49ea-b7fe-1773d9a1f325-kube-api-access-lhktk\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.324897 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6f2t"] Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.402627 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p48hh"] Jan 04 11:54:43 crc kubenswrapper[4797]: E0104 11:54:43.403070 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a574a7-2a85-46db-b551-f422ff5e1f3a" containerName="controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.403084 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a574a7-2a85-46db-b551-f422ff5e1f3a" containerName="controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: E0104 11:54:43.403100 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" containerName="route-controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.403106 4797 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" containerName="route-controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.403189 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="94a574a7-2a85-46db-b551-f422ff5e1f3a" containerName="controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.403204 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" containerName="route-controller-manager" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.403889 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.405444 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.414453 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p48hh"] Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.587878 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-utilities\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.587931 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2qfc\" (UniqueName: \"kubernetes.io/projected/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-kube-api-access-f2qfc\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.587958 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-catalog-content\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.688815 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-utilities\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.688911 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2qfc\" (UniqueName: \"kubernetes.io/projected/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-kube-api-access-f2qfc\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.689111 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-catalog-content\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 
11:54:43.689276 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-utilities\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.689758 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-catalog-content\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.707648 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2qfc\" (UniqueName: \"kubernetes.io/projected/7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1-kube-api-access-f2qfc\") pod \"community-operators-p48hh\" (UID: \"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1\") " pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.728118 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p48hh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.740076 4797 generic.go:334] "Generic (PLEG): container finished" podID="e1ff8919-8dda-4f12-84bf-78f0014b5ec5" containerID="d119180f013fef56306757e793baa61eb6627e8ee6588e38f030c94736672a85" exitCode=0 Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.740208 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f2t" event={"ID":"e1ff8919-8dda-4f12-84bf-78f0014b5ec5","Type":"ContainerDied","Data":"d119180f013fef56306757e793baa61eb6627e8ee6588e38f030c94736672a85"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.740250 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f2t" event={"ID":"e1ff8919-8dda-4f12-84bf-78f0014b5ec5","Type":"ContainerStarted","Data":"567fb1116a9fd29ad8e180c3b51cae7f3430c69df44b373e7ad571e23600c4ac"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.742746 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.742844 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bf96b59c7-czczf" event={"ID":"94a574a7-2a85-46db-b551-f422ff5e1f3a","Type":"ContainerDied","Data":"1ba918ee69fe85596268fb6b73427bbc40564942f72cd14c95814ff276b7c530"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.743050 4797 scope.go:117] "RemoveContainer" containerID="ac5dde255788677ca78507e9286c4cf1cca358d6f00fec1b29aa681cca54f77b" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.749693 4797 generic.go:334] "Generic (PLEG): container finished" podID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" containerID="50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044" exitCode=0 Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.749857 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" event={"ID":"52cf0c81-6566-49ea-b7fe-1773d9a1f325","Type":"ContainerDied","Data":"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.749916 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" event={"ID":"52cf0c81-6566-49ea-b7fe-1773d9a1f325","Type":"ContainerDied","Data":"c6b2e7607c8b04cb8a54114af92222c2a7b37a18d6d6de8fec00ce8b5e0aa346"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.749999 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.756188 4797 generic.go:334] "Generic (PLEG): container finished" podID="48f385bf-de22-4bcc-9f95-ad5fc822c631" containerID="14807fe00744ddec4dabc60892e4021b91fa26b52ffca9b7448012809496d641" exitCode=0 Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.756267 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7pzcc" event={"ID":"48f385bf-de22-4bcc-9f95-ad5fc822c631","Type":"ContainerDied","Data":"14807fe00744ddec4dabc60892e4021b91fa26b52ffca9b7448012809496d641"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.765924 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zt2ck" event={"ID":"8cd0fba6-5837-4843-a86c-9443222d1961","Type":"ContainerStarted","Data":"7d84d2236b992f08fd9e49abf7bfe2a75b7addf032fea507c98221f02ed17f94"} Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.776426 4797 scope.go:117] "RemoveContainer" containerID="50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.797035 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"] Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.805228 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6bf96b59c7-czczf"] Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.810229 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"] Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.811907 4797 scope.go:117] "RemoveContainer" 
containerID="50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.814602 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4468b7-rhlzh"] Jan 04 11:54:43 crc kubenswrapper[4797]: E0104 11:54:43.815125 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044\": container with ID starting with 50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044 not found: ID does not exist" containerID="50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044" Jan 04 11:54:43 crc kubenswrapper[4797]: I0104 11:54:43.815163 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044"} err="failed to get container status \"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044\": rpc error: code = NotFound desc = could not find container \"50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044\": container with ID starting with 50c735241ca6303df1ee104a4692f00edbffe3dcbe610b66bd57693babc40044 not found: ID does not exist" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.007717 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9"] Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.008449 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013026 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013173 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013266 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013393 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013450 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.013462 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.014927 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-f4dc55956-mm77f"] Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.015708 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.021721 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.021721 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.022063 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.022155 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.022220 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.022555 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.025805 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.026755 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9"] Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.029874 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f4dc55956-mm77f"] Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097034 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-config\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097085 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-config\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097114 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-client-ca\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097172 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/567e491a-1ac4-4a7b-bc38-a2c310405476-serving-cert\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: 
I0104 11:54:44.097204 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-serving-cert\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097244 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-client-ca\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097275 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-proxy-ca-bundles\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097303 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8ppn\" (UniqueName: \"kubernetes.io/projected/567e491a-1ac4-4a7b-bc38-a2c310405476-kube-api-access-m8ppn\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.097355 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5xgk\" (UniqueName: \"kubernetes.io/projected/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-kube-api-access-t5xgk\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: W0104 11:54:44.142250 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d9ff7e5_585d_49c0_a6cf_b2c1964fe3d1.slice/crio-e6fc3638b4d7bf4e2e9b2cbecc2622dc5595ca127c35cf9cf84df59f1cbe81ec WatchSource:0}: Error finding container e6fc3638b4d7bf4e2e9b2cbecc2622dc5595ca127c35cf9cf84df59f1cbe81ec: Status 404 returned error can't find the container with id e6fc3638b4d7bf4e2e9b2cbecc2622dc5595ca127c35cf9cf84df59f1cbe81ec Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.151406 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p48hh"] Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202746 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-config\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202787 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-config\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202813 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-client-ca\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202839 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/567e491a-1ac4-4a7b-bc38-a2c310405476-serving-cert\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202859 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-serving-cert\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202888 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-client-ca\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202907 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-proxy-ca-bundles\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202923 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8ppn\" (UniqueName: \"kubernetes.io/projected/567e491a-1ac4-4a7b-bc38-a2c310405476-kube-api-access-m8ppn\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.202943 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5xgk\" (UniqueName: \"kubernetes.io/projected/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-kube-api-access-t5xgk\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.204137 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-client-ca\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: 
\"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.204563 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-client-ca\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.205592 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-proxy-ca-bundles\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.205954 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-config\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.205962 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/567e491a-1ac4-4a7b-bc38-a2c310405476-config\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.212898 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/567e491a-1ac4-4a7b-bc38-a2c310405476-serving-cert\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.216534 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-serving-cert\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.219567 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5xgk\" (UniqueName: \"kubernetes.io/projected/5eb4fc93-ceca-46c9-9a22-2a9562b0580c-kube-api-access-t5xgk\") pod \"route-controller-manager-85b547dd98-w2fn9\" (UID: \"5eb4fc93-ceca-46c9-9a22-2a9562b0580c\") " pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.220103 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8ppn\" (UniqueName: \"kubernetes.io/projected/567e491a-1ac4-4a7b-bc38-a2c310405476-kube-api-access-m8ppn\") pod \"controller-manager-f4dc55956-mm77f\" (UID: \"567e491a-1ac4-4a7b-bc38-a2c310405476\") " pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.347688 4797 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" Jan 04 11:54:44 crc kubenswrapper[4797]: I0104 11:54:44.362756 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.762401 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9"] Jan 04 11:54:45 crc kubenswrapper[4797]: W0104 11:54:44.766805 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5eb4fc93_ceca_46c9_9a22_2a9562b0580c.slice/crio-75f21179af29d1499c68af446862ad2496193253a412689ddfc0dc3e974eed5e WatchSource:0}: Error finding container 75f21179af29d1499c68af446862ad2496193253a412689ddfc0dc3e974eed5e: Status 404 returned error can't find the container with id 75f21179af29d1499c68af446862ad2496193253a412689ddfc0dc3e974eed5e Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.776445 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f2t" event={"ID":"e1ff8919-8dda-4f12-84bf-78f0014b5ec5","Type":"ContainerStarted","Data":"df8bff9530539bc9e07a759698f68c41a9ed8ffb06b3499b18920f97b035ad69"} Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.784860 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7pzcc" event={"ID":"48f385bf-de22-4bcc-9f95-ad5fc822c631","Type":"ContainerStarted","Data":"81e8fa6fc29cd51d663fbe4c0ab55efb21ca462a87384d5418a70d1045f93dcf"} Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.787687 4797 generic.go:334] "Generic (PLEG): container finished" podID="7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1" containerID="06ba608324446cf62f842cb20b2cfd63713b7dc1c0c4aa072aaeed1dd05075ef" exitCode=0 Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.787745 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p48hh" event={"ID":"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1","Type":"ContainerDied","Data":"06ba608324446cf62f842cb20b2cfd63713b7dc1c0c4aa072aaeed1dd05075ef"} Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.787760 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p48hh" event={"ID":"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1","Type":"ContainerStarted","Data":"e6fc3638b4d7bf4e2e9b2cbecc2622dc5595ca127c35cf9cf84df59f1cbe81ec"} Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.789906 4797 generic.go:334] "Generic (PLEG): container finished" podID="8cd0fba6-5837-4843-a86c-9443222d1961" containerID="7d84d2236b992f08fd9e49abf7bfe2a75b7addf032fea507c98221f02ed17f94" exitCode=0 Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.789949 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zt2ck" event={"ID":"8cd0fba6-5837-4843-a86c-9443222d1961","Type":"ContainerDied","Data":"7d84d2236b992f08fd9e49abf7bfe2a75b7addf032fea507c98221f02ed17f94"} Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:44.831801 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7pzcc" podStartSLOduration=2.265570282 podStartE2EDuration="4.831783291s" podCreationTimestamp="2026-01-04 11:54:40 +0000 UTC" firstStartedPulling="2026-01-04 
11:54:41.71195286 +0000 UTC m=+380.569139579" lastFinishedPulling="2026-01-04 11:54:44.278165869 +0000 UTC m=+383.135352588" observedRunningTime="2026-01-04 11:54:44.831248166 +0000 UTC m=+383.688434885" watchObservedRunningTime="2026-01-04 11:54:44.831783291 +0000 UTC m=+383.688970000"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.480217 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52cf0c81-6566-49ea-b7fe-1773d9a1f325" path="/var/lib/kubelet/pods/52cf0c81-6566-49ea-b7fe-1773d9a1f325/volumes"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.481341 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94a574a7-2a85-46db-b551-f422ff5e1f3a" path="/var/lib/kubelet/pods/94a574a7-2a85-46db-b551-f422ff5e1f3a/volumes"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.675378 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f4dc55956-mm77f"]
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.806847 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zt2ck" event={"ID":"8cd0fba6-5837-4843-a86c-9443222d1961","Type":"ContainerStarted","Data":"492ec18ac678be8f416a6631d9df5aa2514a3aff550240e7502a15d3c59fb8c3"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.812357 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" event={"ID":"567e491a-1ac4-4a7b-bc38-a2c310405476","Type":"ContainerStarted","Data":"b95b0b58b6387fc9dacfc585be4232b029f56faf5e66ce612bd921d9f8ec47cc"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.812398 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" event={"ID":"567e491a-1ac4-4a7b-bc38-a2c310405476","Type":"ContainerStarted","Data":"a52e0d6517ab45d5e70b036a0b213c557a2cd4dd7d1a8daa0b2211f0993d0a62"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.812764 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.814164 4797 patch_prober.go:28] interesting pod/controller-manager-f4dc55956-mm77f container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.71:8443/healthz\": dial tcp 10.217.0.71:8443: connect: connection refused" start-of-body=
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.814199 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" podUID="567e491a-1ac4-4a7b-bc38-a2c310405476" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.71:8443/healthz\": dial tcp 10.217.0.71:8443: connect: connection refused"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.815338 4797 generic.go:334] "Generic (PLEG): container finished" podID="e1ff8919-8dda-4f12-84bf-78f0014b5ec5" containerID="df8bff9530539bc9e07a759698f68c41a9ed8ffb06b3499b18920f97b035ad69" exitCode=0
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.815398 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f2t" event={"ID":"e1ff8919-8dda-4f12-84bf-78f0014b5ec5","Type":"ContainerDied","Data":"df8bff9530539bc9e07a759698f68c41a9ed8ffb06b3499b18920f97b035ad69"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.821666 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" event={"ID":"5eb4fc93-ceca-46c9-9a22-2a9562b0580c","Type":"ContainerStarted","Data":"c446edab9610ad2469a5cc45ab194e87ff0e8fa5224e731b729d23e1e307f9fc"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.821710 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" event={"ID":"5eb4fc93-ceca-46c9-9a22-2a9562b0580c","Type":"ContainerStarted","Data":"75f21179af29d1499c68af446862ad2496193253a412689ddfc0dc3e974eed5e"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.822001 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.826222 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zt2ck" podStartSLOduration=3.348697356 podStartE2EDuration="5.826196124s" podCreationTimestamp="2026-01-04 11:54:40 +0000 UTC" firstStartedPulling="2026-01-04 11:54:42.729088243 +0000 UTC m=+381.586274952" lastFinishedPulling="2026-01-04 11:54:45.206587021 +0000 UTC m=+384.063773720" observedRunningTime="2026-01-04 11:54:45.822293474 +0000 UTC m=+384.679480183" watchObservedRunningTime="2026-01-04 11:54:45.826196124 +0000 UTC m=+384.683382833"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.827938 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p48hh" event={"ID":"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1","Type":"ContainerStarted","Data":"544eb70651955230683e857f9425ca52f9541c4696c10ec33d32378793df5fd9"}
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.830041 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.879962 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-85b547dd98-w2fn9" podStartSLOduration=3.879939879 podStartE2EDuration="3.879939879s" podCreationTimestamp="2026-01-04 11:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:54:45.873703364 +0000 UTC m=+384.730890073" watchObservedRunningTime="2026-01-04 11:54:45.879939879 +0000 UTC m=+384.737126588"
Jan 04 11:54:45 crc kubenswrapper[4797]: I0104 11:54:45.925075 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f" podStartSLOduration=3.925051742 podStartE2EDuration="3.925051742s" podCreationTimestamp="2026-01-04 11:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:54:45.896117466 +0000 UTC m=+384.753304315" watchObservedRunningTime="2026-01-04 11:54:45.925051742 +0000 UTC m=+384.782238451"
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.837332 4797 generic.go:334] "Generic (PLEG): container finished" podID="7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1" containerID="544eb70651955230683e857f9425ca52f9541c4696c10ec33d32378793df5fd9" exitCode=0
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.837396 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p48hh" event={"ID":"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1","Type":"ContainerDied","Data":"544eb70651955230683e857f9425ca52f9541c4696c10ec33d32378793df5fd9"}
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.837675 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p48hh" event={"ID":"7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1","Type":"ContainerStarted","Data":"d4ff48ac74f8b016ad8abc0a7e162c58e6dfd23518d86b296854a3d58ff69b1a"}
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.839924 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f2t" event={"ID":"e1ff8919-8dda-4f12-84bf-78f0014b5ec5","Type":"ContainerStarted","Data":"87b6eb1921b3ebd7a79d62b57e5403e1d9bd70dc0dbd097f3f3a65dfeeb38721"}
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.844688 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-f4dc55956-mm77f"
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.855813 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p48hh" podStartSLOduration=2.394790579 podStartE2EDuration="3.85579609s" podCreationTimestamp="2026-01-04 11:54:43 +0000 UTC" firstStartedPulling="2026-01-04 11:54:44.789686344 +0000 UTC m=+383.646873043" lastFinishedPulling="2026-01-04 11:54:46.250691845 +0000 UTC m=+385.107878554" observedRunningTime="2026-01-04 11:54:46.853152255 +0000 UTC m=+385.710338964" watchObservedRunningTime="2026-01-04 11:54:46.85579609 +0000 UTC m=+385.712982799"
Jan 04 11:54:46 crc kubenswrapper[4797]: I0104 11:54:46.874243 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s6f2t" podStartSLOduration=2.414793341 podStartE2EDuration="4.874222259s" podCreationTimestamp="2026-01-04 11:54:42 +0000 UTC" firstStartedPulling="2026-01-04 11:54:43.743310945 +0000 UTC m=+382.600497644" lastFinishedPulling="2026-01-04 11:54:46.202739853 +0000 UTC m=+385.059926562" observedRunningTime="2026-01-04 11:54:46.874126027 +0000 UTC m=+385.731312746" watchObservedRunningTime="2026-01-04 11:54:46.874222259 +0000 UTC m=+385.731408968"
Jan 04 11:54:49 crc kubenswrapper[4797]: I0104 11:54:49.492698 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:54:49 crc kubenswrapper[4797]: I0104 11:54:49.493336 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:54:49 crc kubenswrapper[4797]: I0104 11:54:49.493379 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 11:54:49 crc kubenswrapper[4797]: I0104 11:54:49.493867 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 11:54:49 crc kubenswrapper[4797]: I0104 11:54:49.493924 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef" gracePeriod=600
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.720616 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.720851 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.772760 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.865585 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef" exitCode=0
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.865703 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef"}
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.866101 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c"}
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.866134 4797 scope.go:117] "RemoveContainer" containerID="2de074651ac046d0109557c28c5509750511a4554777ac6d147b76798436e9d7"
Jan 04 11:54:50 crc kubenswrapper[4797]: I0104 11:54:50.915175 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7pzcc"
Jan 04 11:54:51 crc kubenswrapper[4797]: I0104 11:54:51.349203 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:51 crc kubenswrapper[4797]: I0104 11:54:51.349521 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:51 crc kubenswrapper[4797]: I0104 11:54:51.397870 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:51 crc kubenswrapper[4797]: I0104 11:54:51.925734 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zt2ck"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.132699 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s6f2t"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.133692 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s6f2t"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.207859 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s6f2t"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.456491 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-jqh8d"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.511826 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"]
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.728315 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p48hh"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.728362 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p48hh"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.766963 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p48hh"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.922062 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s6f2t"
Jan 04 11:54:53 crc kubenswrapper[4797]: I0104 11:54:53.923395 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p48hh"
Jan 04 11:55:18 crc kubenswrapper[4797]: I0104 11:55:18.554213 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" podUID="8ab0052f-8002-48bd-882a-b304ed2b2d91" containerName="registry" containerID="cri-o://8bc5e0eafd0dbf7605057f3fc25f1150265558c84d01317a45d0d969de014b48" gracePeriod=30
Jan 04 11:55:19 crc kubenswrapper[4797]: I0104 11:55:19.051632 4797 generic.go:334] "Generic (PLEG): container finished" podID="8ab0052f-8002-48bd-882a-b304ed2b2d91" containerID="8bc5e0eafd0dbf7605057f3fc25f1150265558c84d01317a45d0d969de014b48" exitCode=0
Jan 04 11:55:19 crc kubenswrapper[4797]: I0104 11:55:19.051767 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" event={"ID":"8ab0052f-8002-48bd-882a-b304ed2b2d91","Type":"ContainerDied","Data":"8bc5e0eafd0dbf7605057f3fc25f1150265558c84d01317a45d0d969de014b48"}
Jan 04 11:55:19 crc kubenswrapper[4797]: I0104 11:55:19.980156 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.068721 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj" event={"ID":"8ab0052f-8002-48bd-882a-b304ed2b2d91","Type":"ContainerDied","Data":"4e737fc6839c870eaba0033c876d0ad5a88cad9abb0d127755172d29cb82ad35"}
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.068786 4797 scope.go:117] "RemoveContainer" containerID="8bc5e0eafd0dbf7605057f3fc25f1150265558c84d01317a45d0d969de014b48"
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.068812 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6whj"
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109550 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109641 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109694 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sbn9\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109798 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109848 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109902 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.109936 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.110144 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8ab0052f-8002-48bd-882a-b304ed2b2d91\" (UID: \"8ab0052f-8002-48bd-882a-b304ed2b2d91\") "
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.110820 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.114541 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.120317 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.120440 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9" (OuterVolumeSpecName: "kube-api-access-2sbn9") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "kube-api-access-2sbn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.120774 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.125849 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.128391 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.151839 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8ab0052f-8002-48bd-882a-b304ed2b2d91" (UID: "8ab0052f-8002-48bd-882a-b304ed2b2d91"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212197 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212256 4797 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212275 4797 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8ab0052f-8002-48bd-882a-b304ed2b2d91-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212295 4797 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8ab0052f-8002-48bd-882a-b304ed2b2d91-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212314 4797 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8ab0052f-8002-48bd-882a-b304ed2b2d91-registry-certificates\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212331 4797 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.212353 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sbn9\" (UniqueName: \"kubernetes.io/projected/8ab0052f-8002-48bd-882a-b304ed2b2d91-kube-api-access-2sbn9\") on node \"crc\" DevicePath \"\""
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.423426 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"]
Jan 04 11:55:20 crc kubenswrapper[4797]: I0104 11:55:20.431138 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6whj"]
Jan 04 11:55:21 crc kubenswrapper[4797]: I0104 11:55:21.488073 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ab0052f-8002-48bd-882a-b304ed2b2d91" path="/var/lib/kubelet/pods/8ab0052f-8002-48bd-882a-b304ed2b2d91/volumes"
Jan 04 11:57:19 crc kubenswrapper[4797]: I0104 11:57:19.493290 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:57:19 crc kubenswrapper[4797]: I0104 11:57:19.494138 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:57:49 crc kubenswrapper[4797]: I0104 11:57:49.493619 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:57:49 crc kubenswrapper[4797]: I0104 11:57:49.494222 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:58:19 crc kubenswrapper[4797]: I0104 11:58:19.493904 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 11:58:19 crc kubenswrapper[4797]: I0104 11:58:19.494513 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 11:58:19 crc kubenswrapper[4797]: I0104 11:58:19.494566 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 11:58:19 crc kubenswrapper[4797]: I0104 11:58:19.495213 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 11:58:19 crc kubenswrapper[4797]: I0104 11:58:19.495281 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c" gracePeriod=600
Jan 04 11:58:20 crc kubenswrapper[4797]: I0104 11:58:20.292897 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c" exitCode=0
Jan 04 11:58:20 crc kubenswrapper[4797]: I0104 11:58:20.293082 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c"}
Jan 04 11:58:20 crc kubenswrapper[4797]: I0104 11:58:20.293309 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f"}
Jan 04 11:58:20 crc kubenswrapper[4797]: I0104 11:58:20.293346 4797 scope.go:117] "RemoveContainer" containerID="77f350045142659c21b1e050cd9816ad49dc2ddec55bdf29f1e666f63703e1ef"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.196981 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"]
Jan 04 12:00:00 crc kubenswrapper[4797]: E0104 12:00:00.197871 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab0052f-8002-48bd-882a-b304ed2b2d91" containerName="registry"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.197891 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab0052f-8002-48bd-882a-b304ed2b2d91" containerName="registry"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.198119 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ab0052f-8002-48bd-882a-b304ed2b2d91" containerName="registry"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.198636 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.201743 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"]
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.201771 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.201811 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.360620 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.360762 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.360852 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cv7b\" (UniqueName: \"kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.461780 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.461892 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.462138 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cv7b\" (UniqueName: \"kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.463295 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.475344 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.493796 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cv7b\" (UniqueName: \"kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b\") pod \"collect-profiles-29458800-6hplj\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.522077 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.749196 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"]
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.936261 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj" event={"ID":"6916df2d-860b-434d-8949-d624d7d47b9b","Type":"ContainerStarted","Data":"078cdd04b1cb894e37a2ab7c8703bf99faa029c2fc7efa543e45f5c593c405f9"}
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.936744 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj" event={"ID":"6916df2d-860b-434d-8949-d624d7d47b9b","Type":"ContainerStarted","Data":"f4cca165eaf74a6a14352cdc0443ba159f5df438c7e8abdbb6e0603e5b2cc435"}
Jan 04 12:00:00 crc kubenswrapper[4797]: I0104 12:00:00.956085 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj" podStartSLOduration=0.956059244 podStartE2EDuration="956.059244ms" podCreationTimestamp="2026-01-04 12:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:00:00.952372315 +0000 UTC m=+699.809559024" watchObservedRunningTime="2026-01-04 12:00:00.956059244 +0000 UTC m=+699.813245973"
Jan 04 12:00:01 crc kubenswrapper[4797]: I0104 12:00:01.952790 4797 generic.go:334] "Generic (PLEG): container finished" podID="6916df2d-860b-434d-8949-d624d7d47b9b" containerID="078cdd04b1cb894e37a2ab7c8703bf99faa029c2fc7efa543e45f5c593c405f9" exitCode=0
Jan 04 12:00:01 crc kubenswrapper[4797]: I0104 12:00:01.952932 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj" event={"ID":"6916df2d-860b-434d-8949-d624d7d47b9b","Type":"ContainerDied","Data":"078cdd04b1cb894e37a2ab7c8703bf99faa029c2fc7efa543e45f5c593c405f9"}
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.244027 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.418065 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cv7b\" (UniqueName: \"kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b\") pod \"6916df2d-860b-434d-8949-d624d7d47b9b\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") "
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.418228 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume\") pod \"6916df2d-860b-434d-8949-d624d7d47b9b\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") "
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.418292 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume\") pod \"6916df2d-860b-434d-8949-d624d7d47b9b\" (UID: \"6916df2d-860b-434d-8949-d624d7d47b9b\") "
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.419130 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume" (OuterVolumeSpecName: "config-volume") pod "6916df2d-860b-434d-8949-d624d7d47b9b" (UID: "6916df2d-860b-434d-8949-d624d7d47b9b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.427170 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b" (OuterVolumeSpecName: "kube-api-access-7cv7b") pod "6916df2d-860b-434d-8949-d624d7d47b9b" (UID: "6916df2d-860b-434d-8949-d624d7d47b9b"). InnerVolumeSpecName "kube-api-access-7cv7b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.428272 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6916df2d-860b-434d-8949-d624d7d47b9b" (UID: "6916df2d-860b-434d-8949-d624d7d47b9b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.519730 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6916df2d-860b-434d-8949-d624d7d47b9b-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.519756 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cv7b\" (UniqueName: \"kubernetes.io/projected/6916df2d-860b-434d-8949-d624d7d47b9b-kube-api-access-7cv7b\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.519765 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6916df2d-860b-434d-8949-d624d7d47b9b-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.968558 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj" event={"ID":"6916df2d-860b-434d-8949-d624d7d47b9b","Type":"ContainerDied","Data":"f4cca165eaf74a6a14352cdc0443ba159f5df438c7e8abdbb6e0603e5b2cc435"}
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.968603 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4cca165eaf74a6a14352cdc0443ba159f5df438c7e8abdbb6e0603e5b2cc435"
Jan 04 12:00:03 crc kubenswrapper[4797]: I0104 12:00:03.968677 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"
Jan 04 12:00:19 crc kubenswrapper[4797]: I0104 12:00:19.492892 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:00:19 crc kubenswrapper[4797]: I0104 12:00:19.493574 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:00:49 crc kubenswrapper[4797]: I0104 12:00:49.495049 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:00:49 crc kubenswrapper[4797]: I0104 12:00:49.496089 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:00:50 crc kubenswrapper[4797]: I0104 12:00:50.110778 4797 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.159170 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-thvnv"]
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160003 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-controller" containerID="cri-o://0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160059 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="sbdb" containerID="cri-o://979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160100 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160163 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-acl-logging" containerID="cri-o://adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160228 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="nbdb" containerID="cri-o://63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160276 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-node" containerID="cri-o://d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.160230 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="northd" containerID="cri-o://32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.188260 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller" containerID="cri-o://524c7a1ca24d99795031a9d26001fe5ca859ae7d5a8f3afc75b12818544455b9" gracePeriod=30
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.409845 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/2.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.410560 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/1.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.410638 4797 generic.go:334] "Generic (PLEG): container finished" podID="91fac858-36ec-4a4b-ba0d-014f6b96b421" containerID="06d0b4ccf7ed92165350b78728e150e285b69b3ba50371a453f160262ef42e1c" exitCode=2
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.410770 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerDied","Data":"06d0b4ccf7ed92165350b78728e150e285b69b3ba50371a453f160262ef42e1c"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.410836 4797 scope.go:117] "RemoveContainer" containerID="65d4f4d56f2010bef8e11c61e19a8e9e249f0213c4f5b844c0c762e7ec86255f"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.411415 4797 scope.go:117] "RemoveContainer" containerID="06d0b4ccf7ed92165350b78728e150e285b69b3ba50371a453f160262ef42e1c"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.412972 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovnkube-controller/3.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417035 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-acl-logging/0.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417470 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-controller/0.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417833 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="524c7a1ca24d99795031a9d26001fe5ca859ae7d5a8f3afc75b12818544455b9" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417860 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417868 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417876 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417884 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417890 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68" exitCode=0
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417896 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9" exitCode=143
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417904 4797 generic.go:334] "Generic (PLEG): container finished" podID="b765f232-404c-4b96-8190-376d4104facc" containerID="0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3" exitCode=143
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417923 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"524c7a1ca24d99795031a9d26001fe5ca859ae7d5a8f3afc75b12818544455b9"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417946 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417957 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417967 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.417978 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.418006 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.418016 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.418024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.418033 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" event={"ID":"b765f232-404c-4b96-8190-376d4104facc","Type":"ContainerDied","Data":"98e2447b3d2f2f21a94a5a6b86c8fc59340ca70a687849b9352b4719f19e193b"}
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.418041 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98e2447b3d2f2f21a94a5a6b86c8fc59340ca70a687849b9352b4719f19e193b"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.463271 4797 scope.go:117] "RemoveContainer" containerID="6913e8c5200bffe6f3bbfc581998d5db9ca1a333adb49371118b531522b5c08d"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.475665 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-acl-logging/0.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.477185 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-controller/0.log"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.477762 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531603 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-77xk9"]
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531820 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531834 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531848 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="nbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531857 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="nbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531867 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="sbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531875 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="sbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531885 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531891 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531899 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="northd"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531905 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="northd"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531914 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-node"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531920 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-node"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531927 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-ovn-metrics"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531933 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-ovn-metrics"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531941 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531946 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531952 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531957 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.531965 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6916df2d-860b-434d-8949-d624d7d47b9b" containerName="collect-profiles"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.531972 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6916df2d-860b-434d-8949-d624d7d47b9b" containerName="collect-profiles"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.532020 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532026 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.532036 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kubecfg-setup"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532043 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kubecfg-setup"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.532051 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532057 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: E0104 12:01:07.532067 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-acl-logging"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532074 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-acl-logging"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532156 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532166 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532172 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-node"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532181 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532191 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-acl-logging"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532200 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="sbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532208 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovn-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532214 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6916df2d-860b-434d-8949-d624d7d47b9b" containerName="collect-profiles"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532220 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="nbdb"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532226 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="kube-rbac-proxy-ovn-metrics"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532232 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="northd"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532385 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.532394 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b765f232-404c-4b96-8190-376d4104facc" containerName="ovnkube-controller"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.533884 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628051 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62nns\" (UniqueName: \"kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628113 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628146 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628178 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628207 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628234 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628282 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628310 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628354 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628385 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628413 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628437 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628463 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628503 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628525 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628543 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628570 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628589 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628610 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628638 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units\") pod \"b765f232-404c-4b96-8190-376d4104facc\" (UID: \"b765f232-404c-4b96-8190-376d4104facc\") "
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628797 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-env-overrides\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628851 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-netd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628890 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-etc-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628919 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-config\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9"
Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628947 4797 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629009 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-log-socket\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628235 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log" (OuterVolumeSpecName: "node-log") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629058 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-var-lib-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628667 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628946 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.628978 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629014 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629111 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629133 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629260 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629303 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629333 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629364 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629389 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629392 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash" (OuterVolumeSpecName: "host-slash") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). 
InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629397 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-node-log\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629435 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629459 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629484 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-slash\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629496 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket" (OuterVolumeSpecName: "log-socket") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629517 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-systemd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629611 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-script-lib\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629684 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj8c9\" (UniqueName: \"kubernetes.io/projected/ce3e8d67-6307-4303-96c0-8c5afdd58134-kube-api-access-gj8c9\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629747 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629762 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-ovn\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.629918 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-kubelet\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630017 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630083 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovn-node-metrics-cert\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630175 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-bin\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630252 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630330 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-systemd-units\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630394 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-netns\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630525 4797 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-node-log\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630586 4797 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630644 4797 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630696 4797 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630743 4797 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630794 4797 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630872 4797 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b765f232-404c-4b96-8190-376d4104facc-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630929 4797 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.630998 4797 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631057 4797 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-log-socket\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631107 4797 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631159 4797 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631211 4797 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631266 4797 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631317 4797 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631370 4797 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-host-slash\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.631422 4797 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.636978 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns" (OuterVolumeSpecName: "kube-api-access-62nns") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "kube-api-access-62nns". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.637247 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.651725 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b765f232-404c-4b96-8190-376d4104facc" (UID: "b765f232-404c-4b96-8190-376d4104facc"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732111 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-bin\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732235 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732257 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-systemd-units\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732199 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-bin\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732314 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-netns\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732335 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-netd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732349 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-env-overrides\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732429 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-etc-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 
12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732449 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-config\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732467 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732490 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-log-socket\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732511 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-var-lib-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732534 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-node-log\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732556 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-slash\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732569 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-systemd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732589 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-script-lib\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732604 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj8c9\" (UniqueName: \"kubernetes.io/projected/ce3e8d67-6307-4303-96c0-8c5afdd58134-kube-api-access-gj8c9\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732621 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-ovn\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732638 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-kubelet\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732656 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732672 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovn-node-metrics-cert\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732703 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62nns\" (UniqueName: \"kubernetes.io/projected/b765f232-404c-4b96-8190-376d4104facc-kube-api-access-62nns\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732715 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b765f232-404c-4b96-8190-376d4104facc-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732725 4797 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b765f232-404c-4b96-8190-376d4104facc-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732389 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-systemd-units\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733303 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-etc-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732390 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733345 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-env-overrides\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733444 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-slash\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.732405 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-cni-netd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733511 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733557 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-log-socket\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733596 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-var-lib-openvswitch\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733640 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-node-log\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733767 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-config\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733805 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-ovn\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733830 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-kubelet\") pod 
\"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733851 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.733873 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-run-systemd\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.734097 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ce3e8d67-6307-4303-96c0-8c5afdd58134-host-run-netns\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.734432 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovnkube-script-lib\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.738129 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ce3e8d67-6307-4303-96c0-8c5afdd58134-ovn-node-metrics-cert\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.762810 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj8c9\" (UniqueName: \"kubernetes.io/projected/ce3e8d67-6307-4303-96c0-8c5afdd58134-kube-api-access-gj8c9\") pod \"ovnkube-node-77xk9\" (UID: \"ce3e8d67-6307-4303-96c0-8c5afdd58134\") " pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: I0104 12:01:07.863936 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:07 crc kubenswrapper[4797]: W0104 12:01:07.915545 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce3e8d67_6307_4303_96c0_8c5afdd58134.slice/crio-d283f853c3725a4e989cf2d39c1f2cb95d683ccb5cae22a575c35a94e8ee7c79 WatchSource:0}: Error finding container d283f853c3725a4e989cf2d39c1f2cb95d683ccb5cae22a575c35a94e8ee7c79: Status 404 returned error can't find the container with id d283f853c3725a4e989cf2d39c1f2cb95d683ccb5cae22a575c35a94e8ee7c79 Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.432810 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-acl-logging/0.log" Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.435189 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-thvnv_b765f232-404c-4b96-8190-376d4104facc/ovn-controller/0.log" Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.436395 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-thvnv" Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.464313 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xwctk_91fac858-36ec-4a4b-ba0d-014f6b96b421/kube-multus/2.log" Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.464542 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xwctk" event={"ID":"91fac858-36ec-4a4b-ba0d-014f6b96b421","Type":"ContainerStarted","Data":"fe35a86d3fb1d3a42c0e76f0324bf27bad7368ba47eb3531411aed9d3fab769f"} Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.471319 4797 generic.go:334] "Generic (PLEG): container finished" podID="ce3e8d67-6307-4303-96c0-8c5afdd58134" containerID="75b1f5881014507f1377429c996b0571895659397b8a4c691e8d48c61db524e8" exitCode=0 Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.471449 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerDied","Data":"75b1f5881014507f1377429c996b0571895659397b8a4c691e8d48c61db524e8"} Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.471627 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"d283f853c3725a4e989cf2d39c1f2cb95d683ccb5cae22a575c35a94e8ee7c79"} Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.617141 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-thvnv"] Jan 04 12:01:08 crc kubenswrapper[4797]: I0104 12:01:08.618984 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-thvnv"] Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.483885 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b765f232-404c-4b96-8190-376d4104facc" path="/var/lib/kubelet/pods/b765f232-404c-4b96-8190-376d4104facc/volumes" Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486109 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" 
event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"bab3df55477ea9f624131e5a66ae2ed3806babba4abe37187e95b978c7e8e5da"} Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486144 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"cc2b439d788e1cd69397f1056f47880fc111006020c65863bf6b9ca74b9c6d3d"} Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486158 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"332f5a11f93e06fb07189f8fc636e8a6ef4872a2beec34f293a87b39b0863df4"} Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486171 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"3ba3505fdaf82adcbe154bb804d7e5b1369e4da5fb66aab4ddd7b6c9513b845b"} Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486186 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"a9d7163fff3887695d9e1978a990d4c77567eb1bf4d9a56824e4f2203c8cc768"} Jan 04 12:01:09 crc kubenswrapper[4797]: I0104 12:01:09.486197 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"656be19adb851c636fa56c04fa18b28fd3156eb426e049163ef143073d66bdb2"} Jan 04 12:01:11 crc kubenswrapper[4797]: I0104 12:01:11.500162 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"b12311790bba2b81e990a250703d1449350e65b88504f1d42df35f9b26f01256"} Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.415152 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-jlv9q"] Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.416419 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.419036 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.419217 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.419358 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.419657 4797 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-h94rw" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.522392 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" event={"ID":"ce3e8d67-6307-4303-96c0-8c5afdd58134","Type":"ContainerStarted","Data":"5ec17f8bab004484e6bca86340ac20edcb559f7796bdf81f5fb114a4a7f1b646"} Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.522666 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.529296 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.529371 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.529418 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwjrx\" (UniqueName: \"kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.547096 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.557579 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" podStartSLOduration=7.557556333 podStartE2EDuration="7.557556333s" podCreationTimestamp="2026-01-04 12:01:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:01:14.555005605 +0000 UTC m=+773.412192314" watchObservedRunningTime="2026-01-04 12:01:14.557556333 +0000 UTC m=+773.414743052" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.634730 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwjrx\" (UniqueName: \"kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " 
pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.635209 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.635269 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.635478 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.636489 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.670970 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwjrx\" (UniqueName: \"kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx\") pod \"crc-storage-crc-jlv9q\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: I0104 12:01:14.738167 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: E0104 12:01:14.771791 4797 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(b5f5901cd184c77d883b72d6ca3019e1bd8f08f63af624005a74b9ace633ec3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 12:01:14 crc kubenswrapper[4797]: E0104 12:01:14.771865 4797 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(b5f5901cd184c77d883b72d6ca3019e1bd8f08f63af624005a74b9ace633ec3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: E0104 12:01:14.771900 4797 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(b5f5901cd184c77d883b72d6ca3019e1bd8f08f63af624005a74b9ace633ec3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:14 crc kubenswrapper[4797]: E0104 12:01:14.771961 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-jlv9q_crc-storage(d1329c03-9677-41bd-8589-615055e84272)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-jlv9q_crc-storage(d1329c03-9677-41bd-8589-615055e84272)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(b5f5901cd184c77d883b72d6ca3019e1bd8f08f63af624005a74b9ace633ec3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-jlv9q" podUID="d1329c03-9677-41bd-8589-615055e84272" Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.367396 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jlv9q"] Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.529646 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.530181 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.530226 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.530249 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:15 crc kubenswrapper[4797]: I0104 12:01:15.567354 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:15 crc kubenswrapper[4797]: E0104 12:01:15.573235 4797 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(dbbfc189d26e406598d246f71b662f9170a9e2d72516e204e4045e7629498980): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 12:01:15 crc kubenswrapper[4797]: E0104 12:01:15.573289 4797 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(dbbfc189d26e406598d246f71b662f9170a9e2d72516e204e4045e7629498980): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:15 crc kubenswrapper[4797]: E0104 12:01:15.573312 4797 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(dbbfc189d26e406598d246f71b662f9170a9e2d72516e204e4045e7629498980): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:15 crc kubenswrapper[4797]: E0104 12:01:15.573356 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-jlv9q_crc-storage(d1329c03-9677-41bd-8589-615055e84272)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-jlv9q_crc-storage(d1329c03-9677-41bd-8589-615055e84272)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-jlv9q_crc-storage_d1329c03-9677-41bd-8589-615055e84272_0(dbbfc189d26e406598d246f71b662f9170a9e2d72516e204e4045e7629498980): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-jlv9q" podUID="d1329c03-9677-41bd-8589-615055e84272" Jan 04 12:01:19 crc kubenswrapper[4797]: I0104 12:01:19.493479 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:01:19 crc kubenswrapper[4797]: I0104 12:01:19.493932 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:01:19 crc kubenswrapper[4797]: I0104 12:01:19.494031 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:01:19 crc kubenswrapper[4797]: I0104 12:01:19.495060 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:01:19 crc kubenswrapper[4797]: I0104 12:01:19.495173 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f" gracePeriod=600 Jan 04 12:01:20 crc kubenswrapper[4797]: I0104 12:01:20.567966 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f" exitCode=0 Jan 04 12:01:20 crc kubenswrapper[4797]: I0104 12:01:20.568316 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f"} Jan 04 12:01:20 crc kubenswrapper[4797]: I0104 12:01:20.568857 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c"} Jan 04 12:01:20 crc 
kubenswrapper[4797]: I0104 12:01:20.568898 4797 scope.go:117] "RemoveContainer" containerID="f643f98bdd7bf140f1f61ab71b934dc7e00e570152d578dae157494852da556c" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.823197 4797 scope.go:117] "RemoveContainer" containerID="172334d5c693006ccf865c0649fe6283c71b1dc7156ab324e9a4c083e0dfbac6" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.848608 4797 scope.go:117] "RemoveContainer" containerID="d15ff2cbdd168e42bb939b405f8a4e829f2e68fcba82896a5afd3e462e03fa68" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.869629 4797 scope.go:117] "RemoveContainer" containerID="5fd595ba22d1b879a378b203e700b5a598af1b392a6d58a26902718e80fd3044" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.885656 4797 scope.go:117] "RemoveContainer" containerID="0015bba15cfdaadfc162fc56eae651f49f98b8ac3ad7eacdc1075dc955976bb3" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.907176 4797 scope.go:117] "RemoveContainer" containerID="524c7a1ca24d99795031a9d26001fe5ca859ae7d5a8f3afc75b12818544455b9" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.926739 4797 scope.go:117] "RemoveContainer" containerID="adad3a640bc8f8d60141fe78c4cbf0a6c5acd0cc30d9e9f756d5fd73a8246ed9" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.941134 4797 scope.go:117] "RemoveContainer" containerID="63fbb3429a1481316dfab4df98bdb928cfcc8f3d764f2365442488001f24bf52" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.956332 4797 scope.go:117] "RemoveContainer" containerID="32cfa735a3e3912813c7de43adb7374d64fb07ad55098037b0a83fbc766b98b9" Jan 04 12:01:21 crc kubenswrapper[4797]: I0104 12:01:21.974172 4797 scope.go:117] "RemoveContainer" containerID="979bd39738c85fcf4bc3f5171951e9197ada1431b06e4940c218379162376d1d" Jan 04 12:01:28 crc kubenswrapper[4797]: I0104 12:01:28.473249 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:28 crc kubenswrapper[4797]: I0104 12:01:28.474558 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:28 crc kubenswrapper[4797]: I0104 12:01:28.778509 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jlv9q"] Jan 04 12:01:28 crc kubenswrapper[4797]: I0104 12:01:28.785771 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:01:29 crc kubenswrapper[4797]: I0104 12:01:29.630091 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jlv9q" event={"ID":"d1329c03-9677-41bd-8589-615055e84272","Type":"ContainerStarted","Data":"a66283b8d66563fc58d66adb94cfa5aa4f8675283f55b043738d0ae6a6942129"} Jan 04 12:01:30 crc kubenswrapper[4797]: I0104 12:01:30.638016 4797 generic.go:334] "Generic (PLEG): container finished" podID="d1329c03-9677-41bd-8589-615055e84272" containerID="f0edabeb17a07489a9221c52ea31e711940821ff1d3df41f58b17e88058ec99b" exitCode=0 Jan 04 12:01:30 crc kubenswrapper[4797]: I0104 12:01:30.638107 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jlv9q" event={"ID":"d1329c03-9677-41bd-8589-615055e84272","Type":"ContainerDied","Data":"f0edabeb17a07489a9221c52ea31e711940821ff1d3df41f58b17e88058ec99b"} Jan 04 12:01:31 crc kubenswrapper[4797]: I0104 12:01:31.949064 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:31 crc kubenswrapper[4797]: I0104 12:01:31.998162 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt\") pod \"d1329c03-9677-41bd-8589-615055e84272\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " Jan 04 12:01:31 crc kubenswrapper[4797]: I0104 12:01:31.998270 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwjrx\" (UniqueName: \"kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx\") pod \"d1329c03-9677-41bd-8589-615055e84272\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " Jan 04 12:01:31 crc kubenswrapper[4797]: I0104 12:01:31.998313 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage\") pod \"d1329c03-9677-41bd-8589-615055e84272\" (UID: \"d1329c03-9677-41bd-8589-615055e84272\") " Jan 04 12:01:31 crc kubenswrapper[4797]: I0104 12:01:31.998538 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d1329c03-9677-41bd-8589-615055e84272" (UID: "d1329c03-9677-41bd-8589-615055e84272"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.005750 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx" (OuterVolumeSpecName: "kube-api-access-dwjrx") pod "d1329c03-9677-41bd-8589-615055e84272" (UID: "d1329c03-9677-41bd-8589-615055e84272"). InnerVolumeSpecName "kube-api-access-dwjrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.015287 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d1329c03-9677-41bd-8589-615055e84272" (UID: "d1329c03-9677-41bd-8589-615055e84272"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.099581 4797 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d1329c03-9677-41bd-8589-615055e84272-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.099620 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwjrx\" (UniqueName: \"kubernetes.io/projected/d1329c03-9677-41bd-8589-615055e84272-kube-api-access-dwjrx\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.099631 4797 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d1329c03-9677-41bd-8589-615055e84272-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.652604 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jlv9q" event={"ID":"d1329c03-9677-41bd-8589-615055e84272","Type":"ContainerDied","Data":"a66283b8d66563fc58d66adb94cfa5aa4f8675283f55b043738d0ae6a6942129"} Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.652934 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a66283b8d66563fc58d66adb94cfa5aa4f8675283f55b043738d0ae6a6942129" Jan 04 12:01:32 crc kubenswrapper[4797]: I0104 12:01:32.652662 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jlv9q" Jan 04 12:01:37 crc kubenswrapper[4797]: I0104 12:01:37.905603 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-77xk9" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.442150 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w"] Jan 04 12:01:40 crc kubenswrapper[4797]: E0104 12:01:40.442721 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1329c03-9677-41bd-8589-615055e84272" containerName="storage" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.442741 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1329c03-9677-41bd-8589-615055e84272" containerName="storage" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.442922 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1329c03-9677-41bd-8589-615055e84272" containerName="storage" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.444146 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.447489 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.457191 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w"] Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.510535 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.510749 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg6kn\" (UniqueName: \"kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.510928 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.612086 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg6kn\" (UniqueName: \"kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.612151 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.612181 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.612929 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.612983 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.635371 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg6kn\" (UniqueName: \"kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:40 crc kubenswrapper[4797]: I0104 12:01:40.770583 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:41 crc kubenswrapper[4797]: I0104 12:01:41.216832 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w"] Jan 04 12:01:41 crc kubenswrapper[4797]: I0104 12:01:41.711445 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerStarted","Data":"de10864566d824009d398863e601114dbfce5525dca0c258243c2bf5584d0bcb"} Jan 04 12:01:41 crc kubenswrapper[4797]: I0104 12:01:41.711522 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerStarted","Data":"74e29548eed1152c6295d4eacf5ea216dd2d7502fb7898fb4fd7cf9dd93260af"} Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.721373 4797 generic.go:334] "Generic (PLEG): container finished" podID="81203228-c493-4cf1-9299-f5d46acba957" containerID="de10864566d824009d398863e601114dbfce5525dca0c258243c2bf5584d0bcb" exitCode=0 Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.721457 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerDied","Data":"de10864566d824009d398863e601114dbfce5525dca0c258243c2bf5584d0bcb"} Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.772061 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.774383 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.796960 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.944902 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.944978 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4txn\" (UniqueName: \"kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:42 crc kubenswrapper[4797]: I0104 12:01:42.945202 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.046488 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.046554 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.046597 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4txn\" (UniqueName: \"kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.047141 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.047548 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.075720 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-z4txn\" (UniqueName: \"kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn\") pod \"redhat-operators-zfdcm\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.121393 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.331754 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:01:43 crc kubenswrapper[4797]: W0104 12:01:43.337044 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67266054_24cd_443d_9448_184492580b64.slice/crio-ddb42cd50f1248f7135ebf2c8cbd027864f3077f2baa9e8447e781d9e62d4445 WatchSource:0}: Error finding container ddb42cd50f1248f7135ebf2c8cbd027864f3077f2baa9e8447e781d9e62d4445: Status 404 returned error can't find the container with id ddb42cd50f1248f7135ebf2c8cbd027864f3077f2baa9e8447e781d9e62d4445 Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.729588 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerStarted","Data":"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77"} Jan 04 12:01:43 crc kubenswrapper[4797]: I0104 12:01:43.729845 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerStarted","Data":"ddb42cd50f1248f7135ebf2c8cbd027864f3077f2baa9e8447e781d9e62d4445"} Jan 04 12:01:44 crc kubenswrapper[4797]: I0104 12:01:44.739571 4797 generic.go:334] "Generic (PLEG): container finished" podID="67266054-24cd-443d-9448-184492580b64" containerID="ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77" exitCode=0 Jan 04 12:01:44 crc kubenswrapper[4797]: I0104 12:01:44.740932 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerDied","Data":"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77"} Jan 04 12:01:44 crc kubenswrapper[4797]: I0104 12:01:44.743486 4797 generic.go:334] "Generic (PLEG): container finished" podID="81203228-c493-4cf1-9299-f5d46acba957" containerID="f25d8beb463108778c7ac4060856b6600aa527a1efda85366373ff0fe578bfdb" exitCode=0 Jan 04 12:01:44 crc kubenswrapper[4797]: I0104 12:01:44.743538 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerDied","Data":"f25d8beb463108778c7ac4060856b6600aa527a1efda85366373ff0fe578bfdb"} Jan 04 12:01:45 crc kubenswrapper[4797]: I0104 12:01:45.755454 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerStarted","Data":"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300"} Jan 04 12:01:45 crc kubenswrapper[4797]: I0104 12:01:45.760460 4797 generic.go:334] "Generic (PLEG): container finished" podID="81203228-c493-4cf1-9299-f5d46acba957" containerID="564bf6ceeedd3808b25247294c8eaf6ff0918b08516a1d146edd2cec26fe252d" 
exitCode=0 Jan 04 12:01:45 crc kubenswrapper[4797]: I0104 12:01:45.760511 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerDied","Data":"564bf6ceeedd3808b25247294c8eaf6ff0918b08516a1d146edd2cec26fe252d"} Jan 04 12:01:46 crc kubenswrapper[4797]: I0104 12:01:46.776030 4797 generic.go:334] "Generic (PLEG): container finished" podID="67266054-24cd-443d-9448-184492580b64" containerID="d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300" exitCode=0 Jan 04 12:01:46 crc kubenswrapper[4797]: I0104 12:01:46.776126 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerDied","Data":"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300"} Jan 04 12:01:46 crc kubenswrapper[4797]: I0104 12:01:46.992943 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.102428 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util\") pod \"81203228-c493-4cf1-9299-f5d46acba957\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.102518 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle\") pod \"81203228-c493-4cf1-9299-f5d46acba957\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.102681 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg6kn\" (UniqueName: \"kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn\") pod \"81203228-c493-4cf1-9299-f5d46acba957\" (UID: \"81203228-c493-4cf1-9299-f5d46acba957\") " Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.103493 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle" (OuterVolumeSpecName: "bundle") pod "81203228-c493-4cf1-9299-f5d46acba957" (UID: "81203228-c493-4cf1-9299-f5d46acba957"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.116347 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn" (OuterVolumeSpecName: "kube-api-access-vg6kn") pod "81203228-c493-4cf1-9299-f5d46acba957" (UID: "81203228-c493-4cf1-9299-f5d46acba957"). InnerVolumeSpecName "kube-api-access-vg6kn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.117320 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util" (OuterVolumeSpecName: "util") pod "81203228-c493-4cf1-9299-f5d46acba957" (UID: "81203228-c493-4cf1-9299-f5d46acba957"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.204829 4797 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-util\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.205142 4797 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81203228-c493-4cf1-9299-f5d46acba957-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.205294 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg6kn\" (UniqueName: \"kubernetes.io/projected/81203228-c493-4cf1-9299-f5d46acba957-kube-api-access-vg6kn\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.785708 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerStarted","Data":"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0"} Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.788550 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" event={"ID":"81203228-c493-4cf1-9299-f5d46acba957","Type":"ContainerDied","Data":"74e29548eed1152c6295d4eacf5ea216dd2d7502fb7898fb4fd7cf9dd93260af"} Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.788583 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74e29548eed1152c6295d4eacf5ea216dd2d7502fb7898fb4fd7cf9dd93260af" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.788653 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w" Jan 04 12:01:47 crc kubenswrapper[4797]: I0104 12:01:47.816328 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zfdcm" podStartSLOduration=3.223717769 podStartE2EDuration="5.816304907s" podCreationTimestamp="2026-01-04 12:01:42 +0000 UTC" firstStartedPulling="2026-01-04 12:01:44.742027334 +0000 UTC m=+803.599214083" lastFinishedPulling="2026-01-04 12:01:47.334614502 +0000 UTC m=+806.191801221" observedRunningTime="2026-01-04 12:01:47.810648118 +0000 UTC m=+806.667834867" watchObservedRunningTime="2026-01-04 12:01:47.816304907 +0000 UTC m=+806.673491646" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.894322 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-2w96k"] Jan 04 12:01:50 crc kubenswrapper[4797]: E0104 12:01:50.894709 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="extract" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.894720 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="extract" Jan 04 12:01:50 crc kubenswrapper[4797]: E0104 12:01:50.894729 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="util" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.894735 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="util" Jan 04 12:01:50 crc kubenswrapper[4797]: E0104 12:01:50.894744 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="pull" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.894749 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="pull" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.894848 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="81203228-c493-4cf1-9299-f5d46acba957" containerName="extract" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.895206 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.897225 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.897884 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-9gq9j" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.900892 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 04 12:01:50 crc kubenswrapper[4797]: I0104 12:01:50.916145 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-2w96k"] Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.059863 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2frpm\" (UniqueName: \"kubernetes.io/projected/7f178f28-d4be-4076-b5b0-549a693eae61-kube-api-access-2frpm\") pod \"nmstate-operator-6769fb99d-2w96k\" (UID: \"7f178f28-d4be-4076-b5b0-549a693eae61\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.160746 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2frpm\" (UniqueName: \"kubernetes.io/projected/7f178f28-d4be-4076-b5b0-549a693eae61-kube-api-access-2frpm\") pod \"nmstate-operator-6769fb99d-2w96k\" (UID: \"7f178f28-d4be-4076-b5b0-549a693eae61\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.180191 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2frpm\" (UniqueName: \"kubernetes.io/projected/7f178f28-d4be-4076-b5b0-549a693eae61-kube-api-access-2frpm\") pod \"nmstate-operator-6769fb99d-2w96k\" (UID: \"7f178f28-d4be-4076-b5b0-549a693eae61\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.210704 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.393636 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-2w96k"] Jan 04 12:01:51 crc kubenswrapper[4797]: W0104 12:01:51.400896 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f178f28_d4be_4076_b5b0_549a693eae61.slice/crio-b2ff4da2f93406b9cb79a4a63459fea3b1d40272fb96f3f8f73ae9374b3d4a87 WatchSource:0}: Error finding container b2ff4da2f93406b9cb79a4a63459fea3b1d40272fb96f3f8f73ae9374b3d4a87: Status 404 returned error can't find the container with id b2ff4da2f93406b9cb79a4a63459fea3b1d40272fb96f3f8f73ae9374b3d4a87 Jan 04 12:01:51 crc kubenswrapper[4797]: I0104 12:01:51.824719 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" event={"ID":"7f178f28-d4be-4076-b5b0-549a693eae61","Type":"ContainerStarted","Data":"b2ff4da2f93406b9cb79a4a63459fea3b1d40272fb96f3f8f73ae9374b3d4a87"} Jan 04 12:01:53 crc kubenswrapper[4797]: I0104 12:01:53.122443 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:53 crc kubenswrapper[4797]: I0104 12:01:53.122755 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:01:54 crc kubenswrapper[4797]: I0104 12:01:54.171021 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zfdcm" podUID="67266054-24cd-443d-9448-184492580b64" containerName="registry-server" probeResult="failure" output=< Jan 04 12:01:54 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s Jan 04 12:01:54 crc kubenswrapper[4797]: > Jan 04 12:01:55 crc kubenswrapper[4797]: I0104 12:01:55.853479 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" event={"ID":"7f178f28-d4be-4076-b5b0-549a693eae61","Type":"ContainerStarted","Data":"0f9a196ffa70d600cd765b60487cb8b83bffa75a7670b5f569a08b54fc4f4825"} Jan 04 12:01:55 crc kubenswrapper[4797]: I0104 12:01:55.877706 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-2w96k" podStartSLOduration=2.354499893 podStartE2EDuration="5.877688118s" podCreationTimestamp="2026-01-04 12:01:50 +0000 UTC" firstStartedPulling="2026-01-04 12:01:51.403071039 +0000 UTC m=+810.260257758" lastFinishedPulling="2026-01-04 12:01:54.926259244 +0000 UTC m=+813.783445983" observedRunningTime="2026-01-04 12:01:55.877649607 +0000 UTC m=+814.734836356" watchObservedRunningTime="2026-01-04 12:01:55.877688118 +0000 UTC m=+814.734874837" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.743077 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.744799 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.747951 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-4t8cc" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.766634 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.775052 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.775848 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.780402 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.805311 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-x8f7b"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.810208 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.838306 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.912658 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz"] Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.913669 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.919303 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.919481 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.919435 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bqqz7" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922193 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-nmstate-lock\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922320 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqqn5\" (UniqueName: \"kubernetes.io/projected/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-kube-api-access-tqqn5\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922434 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-dbus-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922535 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-ovs-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922640 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct987\" (UniqueName: \"kubernetes.io/projected/971f370a-a8c4-409f-bef8-d1c3e8bc048b-kube-api-access-ct987\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922755 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9qnb\" (UniqueName: \"kubernetes.io/projected/2e39d5cd-01ae-48c3-9281-177a2b2591d9-kube-api-access-v9qnb\") pod \"nmstate-metrics-7f7f7578db-5j8jd\" (UID: \"2e39d5cd-01ae-48c3-9281-177a2b2591d9\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 12:02:00.922833 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:00 crc kubenswrapper[4797]: I0104 
12:02:00.929600 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz"] Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.023945 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqqn5\" (UniqueName: \"kubernetes.io/projected/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-kube-api-access-tqqn5\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024004 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1b22c6d0-408c-4506-a7aa-b444f4798cb9-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024049 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-dbus-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024317 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-dbus-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024346 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-ovs-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024360 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-ovs-socket\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024507 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l57wj\" (UniqueName: \"kubernetes.io/projected/1b22c6d0-408c-4506-a7aa-b444f4798cb9-kube-api-access-l57wj\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024624 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct987\" (UniqueName: \"kubernetes.io/projected/971f370a-a8c4-409f-bef8-d1c3e8bc048b-kube-api-access-ct987\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024693 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9qnb\" (UniqueName: 
\"kubernetes.io/projected/2e39d5cd-01ae-48c3-9281-177a2b2591d9-kube-api-access-v9qnb\") pod \"nmstate-metrics-7f7f7578db-5j8jd\" (UID: \"2e39d5cd-01ae-48c3-9281-177a2b2591d9\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024768 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024851 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b22c6d0-408c-4506-a7aa-b444f4798cb9-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024924 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-nmstate-lock\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.024964 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/971f370a-a8c4-409f-bef8-d1c3e8bc048b-nmstate-lock\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: E0104 12:02:01.024930 4797 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 04 12:02:01 crc kubenswrapper[4797]: E0104 12:02:01.025125 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair podName:2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d nodeName:}" failed. No retries permitted until 2026-01-04 12:02:01.525093718 +0000 UTC m=+820.382280467 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair") pod "nmstate-webhook-f8fb84555-fvdtc" (UID: "2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d") : secret "openshift-nmstate-webhook" not found Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.050540 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct987\" (UniqueName: \"kubernetes.io/projected/971f370a-a8c4-409f-bef8-d1c3e8bc048b-kube-api-access-ct987\") pod \"nmstate-handler-x8f7b\" (UID: \"971f370a-a8c4-409f-bef8-d1c3e8bc048b\") " pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.060054 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqqn5\" (UniqueName: \"kubernetes.io/projected/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-kube-api-access-tqqn5\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.065493 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9qnb\" (UniqueName: \"kubernetes.io/projected/2e39d5cd-01ae-48c3-9281-177a2b2591d9-kube-api-access-v9qnb\") pod \"nmstate-metrics-7f7f7578db-5j8jd\" (UID: \"2e39d5cd-01ae-48c3-9281-177a2b2591d9\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.102251 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-55fd9d56b-vhwvm"] Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.103122 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.115447 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-55fd9d56b-vhwvm"] Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.126156 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b22c6d0-408c-4506-a7aa-b444f4798cb9-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.126213 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1b22c6d0-408c-4506-a7aa-b444f4798cb9-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.126250 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l57wj\" (UniqueName: \"kubernetes.io/projected/1b22c6d0-408c-4506-a7aa-b444f4798cb9-kube-api-access-l57wj\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.127294 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1b22c6d0-408c-4506-a7aa-b444f4798cb9-nginx-conf\") pod 
\"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.127626 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.129413 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1b22c6d0-408c-4506-a7aa-b444f4798cb9-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.146365 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l57wj\" (UniqueName: \"kubernetes.io/projected/1b22c6d0-408c-4506-a7aa-b444f4798cb9-kube-api-access-l57wj\") pod \"nmstate-console-plugin-6ff7998486-52zcz\" (UID: \"1b22c6d0-408c-4506-a7aa-b444f4798cb9\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.227658 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.227823 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-console-config\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.227907 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-service-ca\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.227957 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8f7k\" (UniqueName: \"kubernetes.io/projected/090badac-2163-437b-b1d4-a39938a1b82b-kube-api-access-v8f7k\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.228090 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-trusted-ca-bundle\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.228232 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-oauth-config\") pod \"console-55fd9d56b-vhwvm\" 
(UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.228322 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-oauth-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.255170 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330178 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-trusted-ca-bundle\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330708 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-oauth-config\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330780 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-oauth-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330876 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330922 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-console-config\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.330966 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-service-ca\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.331038 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8f7k\" (UniqueName: \"kubernetes.io/projected/090badac-2163-437b-b1d4-a39938a1b82b-kube-api-access-v8f7k\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.331698 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-service-ca\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.331922 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-console-config\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.332185 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-trusted-ca-bundle\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.332823 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/090badac-2163-437b-b1d4-a39938a1b82b-oauth-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.335823 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-oauth-config\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.343857 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/090badac-2163-437b-b1d4-a39938a1b82b-console-serving-cert\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.349192 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8f7k\" (UniqueName: \"kubernetes.io/projected/090badac-2163-437b-b1d4-a39938a1b82b-kube-api-access-v8f7k\") pod \"console-55fd9d56b-vhwvm\" (UID: \"090badac-2163-437b-b1d4-a39938a1b82b\") " pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.361881 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.418729 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.467241 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz"] Jan 04 12:02:01 crc kubenswrapper[4797]: W0104 12:02:01.482482 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b22c6d0_408c_4506_a7aa_b444f4798cb9.slice/crio-9ca84d6d6ee07ddf4bab71eae6663a742f744f1a6a27805e66f1f6eab62234d9 WatchSource:0}: Error finding container 9ca84d6d6ee07ddf4bab71eae6663a742f744f1a6a27805e66f1f6eab62234d9: Status 404 returned error can't find the container with id 9ca84d6d6ee07ddf4bab71eae6663a742f744f1a6a27805e66f1f6eab62234d9 Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.533558 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.537463 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-fvdtc\" (UID: \"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.593795 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd"] Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.628289 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-55fd9d56b-vhwvm"] Jan 04 12:02:01 crc kubenswrapper[4797]: W0104 12:02:01.634269 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod090badac_2163_437b_b1d4_a39938a1b82b.slice/crio-306ef657be7fcff71c49024da4024c53356b0982b78e283e1222fa580b534a76 WatchSource:0}: Error finding container 306ef657be7fcff71c49024da4024c53356b0982b78e283e1222fa580b534a76: Status 404 returned error can't find the container with id 306ef657be7fcff71c49024da4024c53356b0982b78e283e1222fa580b534a76 Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.699662 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.889856 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc"] Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.895623 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" event={"ID":"2e39d5cd-01ae-48c3-9281-177a2b2591d9","Type":"ContainerStarted","Data":"fe76b432eecc7be59b578e60e0279c7f5db7238d1fd78606f06d8ebea243e199"} Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.897118 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-55fd9d56b-vhwvm" event={"ID":"090badac-2163-437b-b1d4-a39938a1b82b","Type":"ContainerStarted","Data":"306ef657be7fcff71c49024da4024c53356b0982b78e283e1222fa580b534a76"} Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.898494 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" event={"ID":"1b22c6d0-408c-4506-a7aa-b444f4798cb9","Type":"ContainerStarted","Data":"9ca84d6d6ee07ddf4bab71eae6663a742f744f1a6a27805e66f1f6eab62234d9"} Jan 04 12:02:01 crc kubenswrapper[4797]: I0104 12:02:01.899739 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-x8f7b" event={"ID":"971f370a-a8c4-409f-bef8-d1c3e8bc048b","Type":"ContainerStarted","Data":"66ab8c4aea6641ea8d869c3acafcdcd9b21ca447b0dfdcdf2efdab19061d2044"} Jan 04 12:02:02 crc kubenswrapper[4797]: I0104 12:02:02.906176 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" event={"ID":"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d","Type":"ContainerStarted","Data":"caecfdaf675eb0bdb28b6a5df09cbedc5ed7d128f8f836a733458c25e984ad2e"} Jan 04 12:02:02 crc kubenswrapper[4797]: I0104 12:02:02.907817 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-55fd9d56b-vhwvm" event={"ID":"090badac-2163-437b-b1d4-a39938a1b82b","Type":"ContainerStarted","Data":"f6515e7d13ae5052b71e8c1cb653ddb6a5d7b8e3807a800980b49faba2be3418"} Jan 04 12:02:02 crc kubenswrapper[4797]: I0104 12:02:02.926938 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-55fd9d56b-vhwvm" podStartSLOduration=1.926923511 podStartE2EDuration="1.926923511s" podCreationTimestamp="2026-01-04 12:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:02:02.921925939 +0000 UTC m=+821.779112648" watchObservedRunningTime="2026-01-04 12:02:02.926923511 +0000 UTC m=+821.784110220" Jan 04 12:02:03 crc kubenswrapper[4797]: I0104 12:02:03.171596 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:02:03 crc kubenswrapper[4797]: I0104 12:02:03.207130 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:02:03 crc kubenswrapper[4797]: I0104 12:02:03.404081 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.922713 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" 
event={"ID":"2e39d5cd-01ae-48c3-9281-177a2b2591d9","Type":"ContainerStarted","Data":"5461c6e5c0a9ff5d8d503b0ba5e76af2775caa662c47edb8788443b4fb078ccc"} Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.925717 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" event={"ID":"1b22c6d0-408c-4506-a7aa-b444f4798cb9","Type":"ContainerStarted","Data":"7d8798fd179c13e8e1ff9fc16135ee117e9d05cfb0187794d759341712b91996"} Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.928976 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-x8f7b" event={"ID":"971f370a-a8c4-409f-bef8-d1c3e8bc048b","Type":"ContainerStarted","Data":"9e3aae430bbcc4397bfa1b76a0ff3c89f43c366cfe9de72712d65d3c76775c19"} Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.929069 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.931374 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zfdcm" podUID="67266054-24cd-443d-9448-184492580b64" containerName="registry-server" containerID="cri-o://54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0" gracePeriod=2 Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.931389 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" event={"ID":"2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d","Type":"ContainerStarted","Data":"711a5a082f5be7cc184575667715637341cd749d3bcfc9ec5b92d4ef10d808b2"} Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.931601 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.943292 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-52zcz" podStartSLOduration=2.590240034 podStartE2EDuration="4.943271108s" podCreationTimestamp="2026-01-04 12:02:00 +0000 UTC" firstStartedPulling="2026-01-04 12:02:01.493151693 +0000 UTC m=+820.350338412" lastFinishedPulling="2026-01-04 12:02:03.846182767 +0000 UTC m=+822.703369486" observedRunningTime="2026-01-04 12:02:04.941133642 +0000 UTC m=+823.798320381" watchObservedRunningTime="2026-01-04 12:02:04.943271108 +0000 UTC m=+823.800457817" Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.967830 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" podStartSLOduration=2.973733608 podStartE2EDuration="4.967802466s" podCreationTimestamp="2026-01-04 12:02:00 +0000 UTC" firstStartedPulling="2026-01-04 12:02:01.891266833 +0000 UTC m=+820.748453532" lastFinishedPulling="2026-01-04 12:02:03.885335681 +0000 UTC m=+822.742522390" observedRunningTime="2026-01-04 12:02:04.962709061 +0000 UTC m=+823.819895790" watchObservedRunningTime="2026-01-04 12:02:04.967802466 +0000 UTC m=+823.824989195" Jan 04 12:02:04 crc kubenswrapper[4797]: I0104 12:02:04.992271 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-x8f7b" podStartSLOduration=2.268043569 podStartE2EDuration="4.9922193s" podCreationTimestamp="2026-01-04 12:02:00 +0000 UTC" firstStartedPulling="2026-01-04 12:02:01.164432416 +0000 UTC m=+820.021619125" lastFinishedPulling="2026-01-04 
12:02:03.888608157 +0000 UTC m=+822.745794856" observedRunningTime="2026-01-04 12:02:04.987748422 +0000 UTC m=+823.844935171" watchObservedRunningTime="2026-01-04 12:02:04.9922193 +0000 UTC m=+823.849406019" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.366680 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.395770 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4txn\" (UniqueName: \"kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn\") pod \"67266054-24cd-443d-9448-184492580b64\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.395850 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content\") pod \"67266054-24cd-443d-9448-184492580b64\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.395896 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities\") pod \"67266054-24cd-443d-9448-184492580b64\" (UID: \"67266054-24cd-443d-9448-184492580b64\") " Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.397015 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities" (OuterVolumeSpecName: "utilities") pod "67266054-24cd-443d-9448-184492580b64" (UID: "67266054-24cd-443d-9448-184492580b64"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.402616 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn" (OuterVolumeSpecName: "kube-api-access-z4txn") pod "67266054-24cd-443d-9448-184492580b64" (UID: "67266054-24cd-443d-9448-184492580b64"). InnerVolumeSpecName "kube-api-access-z4txn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.497094 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.497118 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4txn\" (UniqueName: \"kubernetes.io/projected/67266054-24cd-443d-9448-184492580b64-kube-api-access-z4txn\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.535604 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67266054-24cd-443d-9448-184492580b64" (UID: "67266054-24cd-443d-9448-184492580b64"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.598870 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67266054-24cd-443d-9448-184492580b64-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.941848 4797 generic.go:334] "Generic (PLEG): container finished" podID="67266054-24cd-443d-9448-184492580b64" containerID="54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0" exitCode=0 Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.941963 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zfdcm" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.942014 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerDied","Data":"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0"} Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.942373 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zfdcm" event={"ID":"67266054-24cd-443d-9448-184492580b64","Type":"ContainerDied","Data":"ddb42cd50f1248f7135ebf2c8cbd027864f3077f2baa9e8447e781d9e62d4445"} Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.942400 4797 scope.go:117] "RemoveContainer" containerID="54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.957153 4797 scope.go:117] "RemoveContainer" containerID="d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.976802 4797 scope.go:117] "RemoveContainer" containerID="ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77" Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.996110 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:02:05 crc kubenswrapper[4797]: I0104 12:02:05.999373 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zfdcm"] Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.013406 4797 scope.go:117] "RemoveContainer" containerID="54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0" Jan 04 12:02:06 crc kubenswrapper[4797]: E0104 12:02:06.014500 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0\": container with ID starting with 54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0 not found: ID does not exist" containerID="54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0" Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.014558 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0"} err="failed to get container status \"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0\": rpc error: code = NotFound desc = could not find container \"54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0\": container with ID starting with 54d36fd5fd6593941b86d39e1ae038fead0235f4b58b52081307ad9fe640f1a0 not found: ID does not exist" Jan 04 12:02:06 crc 
kubenswrapper[4797]: I0104 12:02:06.014591 4797 scope.go:117] "RemoveContainer" containerID="d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300" Jan 04 12:02:06 crc kubenswrapper[4797]: E0104 12:02:06.015074 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300\": container with ID starting with d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300 not found: ID does not exist" containerID="d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300" Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.015099 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300"} err="failed to get container status \"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300\": rpc error: code = NotFound desc = could not find container \"d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300\": container with ID starting with d6ddd504a7c00edde90685c3bb9a9431a9e9c6a405e75f5b5aca30e376675300 not found: ID does not exist" Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.015115 4797 scope.go:117] "RemoveContainer" containerID="ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77" Jan 04 12:02:06 crc kubenswrapper[4797]: E0104 12:02:06.016675 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77\": container with ID starting with ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77 not found: ID does not exist" containerID="ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77" Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.016711 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77"} err="failed to get container status \"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77\": rpc error: code = NotFound desc = could not find container \"ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77\": container with ID starting with ad6a2c97671122bd3ce11f11281897feafc3bc5258c7c5319db65457c7871b77 not found: ID does not exist" Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.958751 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" event={"ID":"2e39d5cd-01ae-48c3-9281-177a2b2591d9","Type":"ContainerStarted","Data":"f8b24f048482ea0815a3caaddf2f24a5236c00882291d239a8ae904637fc8ff7"} Jan 04 12:02:06 crc kubenswrapper[4797]: I0104 12:02:06.983850 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-5j8jd" podStartSLOduration=2.7429158449999997 podStartE2EDuration="6.983816964s" podCreationTimestamp="2026-01-04 12:02:00 +0000 UTC" firstStartedPulling="2026-01-04 12:02:01.596869931 +0000 UTC m=+820.454056640" lastFinishedPulling="2026-01-04 12:02:05.83777105 +0000 UTC m=+824.694957759" observedRunningTime="2026-01-04 12:02:06.982681844 +0000 UTC m=+825.839868563" watchObservedRunningTime="2026-01-04 12:02:06.983816964 +0000 UTC m=+825.841003713" Jan 04 12:02:07 crc kubenswrapper[4797]: I0104 12:02:07.487040 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="67266054-24cd-443d-9448-184492580b64" path="/var/lib/kubelet/pods/67266054-24cd-443d-9448-184492580b64/volumes" Jan 04 12:02:11 crc kubenswrapper[4797]: I0104 12:02:11.161252 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-x8f7b" Jan 04 12:02:11 crc kubenswrapper[4797]: I0104 12:02:11.420104 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:11 crc kubenswrapper[4797]: I0104 12:02:11.420188 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:11 crc kubenswrapper[4797]: I0104 12:02:11.428962 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:12 crc kubenswrapper[4797]: I0104 12:02:12.006934 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-55fd9d56b-vhwvm" Jan 04 12:02:12 crc kubenswrapper[4797]: I0104 12:02:12.094234 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 12:02:21 crc kubenswrapper[4797]: I0104 12:02:21.708452 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-fvdtc" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.351222 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr"] Jan 04 12:02:36 crc kubenswrapper[4797]: E0104 12:02:36.352759 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67266054-24cd-443d-9448-184492580b64" containerName="extract-content" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.352835 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="67266054-24cd-443d-9448-184492580b64" containerName="extract-content" Jan 04 12:02:36 crc kubenswrapper[4797]: E0104 12:02:36.352897 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67266054-24cd-443d-9448-184492580b64" containerName="registry-server" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.352945 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="67266054-24cd-443d-9448-184492580b64" containerName="registry-server" Jan 04 12:02:36 crc kubenswrapper[4797]: E0104 12:02:36.353024 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67266054-24cd-443d-9448-184492580b64" containerName="extract-utilities" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.353090 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="67266054-24cd-443d-9448-184492580b64" containerName="extract-utilities" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.353243 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="67266054-24cd-443d-9448-184492580b64" containerName="registry-server" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.354174 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.358398 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.370865 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr"] Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.453656 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.454165 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.454272 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44dqm\" (UniqueName: \"kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.556260 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.556371 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.556492 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44dqm\" (UniqueName: \"kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.557157 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.557260 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.578268 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44dqm\" (UniqueName: \"kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:36 crc kubenswrapper[4797]: I0104 12:02:36.706593 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.031902 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr"] Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.144398 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-wftnf" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" containerName="console" containerID="cri-o://49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160" gracePeriod=15 Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.195170 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerStarted","Data":"efb718a95cf6441c326f6f84f8b2326ca80b4c4f1c0b562f5f658a6c23855d53"} Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.195215 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerStarted","Data":"66b2972666d5538eb6c81719ef100968578a853994c16ea425348df4af5f8587"} Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.524464 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wftnf_fad36179-c797-4ea1-b751-9cf83b762fef/console/0.log" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.524555 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wftnf" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633016 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633129 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633155 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633256 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633285 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzvf9\" (UniqueName: \"kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633308 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633337 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert\") pod \"fad36179-c797-4ea1-b751-9cf83b762fef\" (UID: \"fad36179-c797-4ea1-b751-9cf83b762fef\") " Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633831 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config" (OuterVolumeSpecName: "console-config") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633856 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.633836 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca" (OuterVolumeSpecName: "service-ca") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.634454 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.639142 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9" (OuterVolumeSpecName: "kube-api-access-lzvf9") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "kube-api-access-lzvf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.639236 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.640418 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "fad36179-c797-4ea1-b751-9cf83b762fef" (UID: "fad36179-c797-4ea1-b751-9cf83b762fef"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734313 4797 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734357 4797 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734373 4797 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734386 4797 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-console-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734400 4797 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fad36179-c797-4ea1-b751-9cf83b762fef-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734413 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzvf9\" (UniqueName: \"kubernetes.io/projected/fad36179-c797-4ea1-b751-9cf83b762fef-kube-api-access-lzvf9\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:37 crc kubenswrapper[4797]: I0104 12:02:37.734428 4797 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fad36179-c797-4ea1-b751-9cf83b762fef-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205373 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wftnf_fad36179-c797-4ea1-b751-9cf83b762fef/console/0.log" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205672 4797 generic.go:334] "Generic (PLEG): container finished" podID="fad36179-c797-4ea1-b751-9cf83b762fef" containerID="49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160" exitCode=2 Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205789 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wftnf" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205783 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wftnf" event={"ID":"fad36179-c797-4ea1-b751-9cf83b762fef","Type":"ContainerDied","Data":"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160"} Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205841 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wftnf" event={"ID":"fad36179-c797-4ea1-b751-9cf83b762fef","Type":"ContainerDied","Data":"59961083ac3094599d8cd29bd856e30cb8aa46004707f58a7f588ee7abad0ba5"} Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.205869 4797 scope.go:117] "RemoveContainer" containerID="49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.215614 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerID="efb718a95cf6441c326f6f84f8b2326ca80b4c4f1c0b562f5f658a6c23855d53" exitCode=0 Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.215674 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerDied","Data":"efb718a95cf6441c326f6f84f8b2326ca80b4c4f1c0b562f5f658a6c23855d53"} Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.254203 4797 scope.go:117] "RemoveContainer" containerID="49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160" Jan 04 12:02:38 crc kubenswrapper[4797]: E0104 12:02:38.258137 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160\": container with ID starting with 49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160 not found: ID does not exist" containerID="49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.258342 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160"} err="failed to get container status \"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160\": rpc error: code = NotFound desc = could not find container \"49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160\": container with ID starting with 49db0b38c82437fe10919a03e8ee18fb9e9262e5ea7cdee9f8f196fcadbef160 not found: ID does not exist" Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.263274 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 12:02:38 crc kubenswrapper[4797]: I0104 12:02:38.268676 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-wftnf"] Jan 04 12:02:39 crc kubenswrapper[4797]: I0104 12:02:39.482121 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" path="/var/lib/kubelet/pods/fad36179-c797-4ea1-b751-9cf83b762fef/volumes" Jan 04 12:02:40 crc kubenswrapper[4797]: I0104 12:02:40.235545 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa557e6-a3f8-47cb-88a5-94840f015b14" 
containerID="dd1fcc605bee48b0bb5c67107057cff7beb5228d3ee7cc1fed03f644a0ee435a" exitCode=0 Jan 04 12:02:40 crc kubenswrapper[4797]: I0104 12:02:40.236027 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerDied","Data":"dd1fcc605bee48b0bb5c67107057cff7beb5228d3ee7cc1fed03f644a0ee435a"} Jan 04 12:02:41 crc kubenswrapper[4797]: I0104 12:02:41.245420 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerID="5d8638362ae8ab2883902c6234876a5bf5aa8ec876e21b6bbac09fc4fc015912" exitCode=0 Jan 04 12:02:41 crc kubenswrapper[4797]: I0104 12:02:41.245491 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerDied","Data":"5d8638362ae8ab2883902c6234876a5bf5aa8ec876e21b6bbac09fc4fc015912"} Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.586200 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.708155 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util\") pod \"8fa557e6-a3f8-47cb-88a5-94840f015b14\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.708283 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44dqm\" (UniqueName: \"kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm\") pod \"8fa557e6-a3f8-47cb-88a5-94840f015b14\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.708342 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle\") pod \"8fa557e6-a3f8-47cb-88a5-94840f015b14\" (UID: \"8fa557e6-a3f8-47cb-88a5-94840f015b14\") " Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.709325 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle" (OuterVolumeSpecName: "bundle") pod "8fa557e6-a3f8-47cb-88a5-94840f015b14" (UID: "8fa557e6-a3f8-47cb-88a5-94840f015b14"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.714273 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm" (OuterVolumeSpecName: "kube-api-access-44dqm") pod "8fa557e6-a3f8-47cb-88a5-94840f015b14" (UID: "8fa557e6-a3f8-47cb-88a5-94840f015b14"). InnerVolumeSpecName "kube-api-access-44dqm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.809720 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44dqm\" (UniqueName: \"kubernetes.io/projected/8fa557e6-a3f8-47cb-88a5-94840f015b14-kube-api-access-44dqm\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.809762 4797 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.906542 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util" (OuterVolumeSpecName: "util") pod "8fa557e6-a3f8-47cb-88a5-94840f015b14" (UID: "8fa557e6-a3f8-47cb-88a5-94840f015b14"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:02:42 crc kubenswrapper[4797]: I0104 12:02:42.910446 4797 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa557e6-a3f8-47cb-88a5-94840f015b14-util\") on node \"crc\" DevicePath \"\"" Jan 04 12:02:43 crc kubenswrapper[4797]: I0104 12:02:43.263592 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" event={"ID":"8fa557e6-a3f8-47cb-88a5-94840f015b14","Type":"ContainerDied","Data":"66b2972666d5538eb6c81719ef100968578a853994c16ea425348df4af5f8587"} Jan 04 12:02:43 crc kubenswrapper[4797]: I0104 12:02:43.263666 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66b2972666d5538eb6c81719ef100968578a853994c16ea425348df4af5f8587" Jan 04 12:02:43 crc kubenswrapper[4797]: I0104 12:02:43.263716 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.410638 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd"] Jan 04 12:02:51 crc kubenswrapper[4797]: E0104 12:02:51.411395 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="pull" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411409 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="pull" Jan 04 12:02:51 crc kubenswrapper[4797]: E0104 12:02:51.411419 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" containerName="console" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411430 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" containerName="console" Jan 04 12:02:51 crc kubenswrapper[4797]: E0104 12:02:51.411450 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="util" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411459 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="util" Jan 04 12:02:51 crc kubenswrapper[4797]: E0104 12:02:51.411480 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="extract" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411491 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="extract" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411625 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad36179-c797-4ea1-b751-9cf83b762fef" containerName="console" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.411638 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fa557e6-a3f8-47cb-88a5-94840f015b14" containerName="extract" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.412117 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.416841 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.416849 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.417310 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-wsrjv" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.417446 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.422404 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.439817 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd"] Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.537346 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm22s\" (UniqueName: \"kubernetes.io/projected/5ca974ff-1854-4e86-92e7-d5f1b0f66571-kube-api-access-wm22s\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.537400 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-webhook-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.537436 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-apiservice-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.638130 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-apiservice-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.638245 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm22s\" (UniqueName: \"kubernetes.io/projected/5ca974ff-1854-4e86-92e7-d5f1b0f66571-kube-api-access-wm22s\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.638287 
4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-webhook-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.644057 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-webhook-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.659884 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ca974ff-1854-4e86-92e7-d5f1b0f66571-apiservice-cert\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.662709 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm22s\" (UniqueName: \"kubernetes.io/projected/5ca974ff-1854-4e86-92e7-d5f1b0f66571-kube-api-access-wm22s\") pod \"metallb-operator-controller-manager-5448cb6b96-v2wrd\" (UID: \"5ca974ff-1854-4e86-92e7-d5f1b0f66571\") " pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.727560 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.786673 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45"] Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.787313 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.792243 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xpr5c" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.792865 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.792935 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.809451 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45"] Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.946741 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-apiservice-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.947021 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-webhook-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:51 crc kubenswrapper[4797]: I0104 12:02:51.947058 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjcqv\" (UniqueName: \"kubernetes.io/projected/598502ca-9f0c-4b07-ac22-3d50604f562a-kube-api-access-fjcqv\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.024694 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd"] Jan 04 12:02:52 crc kubenswrapper[4797]: W0104 12:02:52.034187 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ca974ff_1854_4e86_92e7_d5f1b0f66571.slice/crio-27debb5e67f9f0fc8c1b1246b63185477d94bcf6ecd31dd733881945c51292e4 WatchSource:0}: Error finding container 27debb5e67f9f0fc8c1b1246b63185477d94bcf6ecd31dd733881945c51292e4: Status 404 returned error can't find the container with id 27debb5e67f9f0fc8c1b1246b63185477d94bcf6ecd31dd733881945c51292e4 Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.048330 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjcqv\" (UniqueName: \"kubernetes.io/projected/598502ca-9f0c-4b07-ac22-3d50604f562a-kube-api-access-fjcqv\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.048471 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-apiservice-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.050688 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-webhook-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.053674 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-webhook-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.057728 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/598502ca-9f0c-4b07-ac22-3d50604f562a-apiservice-cert\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.063177 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjcqv\" (UniqueName: \"kubernetes.io/projected/598502ca-9f0c-4b07-ac22-3d50604f562a-kube-api-access-fjcqv\") pod \"metallb-operator-webhook-server-fb667c75f-f5h45\" (UID: \"598502ca-9f0c-4b07-ac22-3d50604f562a\") " pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.109436 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.315012 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" event={"ID":"5ca974ff-1854-4e86-92e7-d5f1b0f66571","Type":"ContainerStarted","Data":"27debb5e67f9f0fc8c1b1246b63185477d94bcf6ecd31dd733881945c51292e4"} Jan 04 12:02:52 crc kubenswrapper[4797]: I0104 12:02:52.359568 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45"] Jan 04 12:02:52 crc kubenswrapper[4797]: W0104 12:02:52.363903 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod598502ca_9f0c_4b07_ac22_3d50604f562a.slice/crio-75273c9a0b657fb821c80f44930157060489e1f9e4b7718f915d941221d1bc7b WatchSource:0}: Error finding container 75273c9a0b657fb821c80f44930157060489e1f9e4b7718f915d941221d1bc7b: Status 404 returned error can't find the container with id 75273c9a0b657fb821c80f44930157060489e1f9e4b7718f915d941221d1bc7b Jan 04 12:02:53 crc kubenswrapper[4797]: I0104 12:02:53.321580 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" event={"ID":"598502ca-9f0c-4b07-ac22-3d50604f562a","Type":"ContainerStarted","Data":"75273c9a0b657fb821c80f44930157060489e1f9e4b7718f915d941221d1bc7b"} Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.348704 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" event={"ID":"598502ca-9f0c-4b07-ac22-3d50604f562a","Type":"ContainerStarted","Data":"62856e00c0626b80a31e3efb3e20e887a4900dfb8e0fd09596a8fcefcfa2f7ad"} Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.349345 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.350809 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" event={"ID":"5ca974ff-1854-4e86-92e7-d5f1b0f66571","Type":"ContainerStarted","Data":"ac6b43445102c24dd72a610810e70e0df2f072091ee48b6600d91680cff8266a"} Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.351090 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.380911 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" podStartSLOduration=2.336649536 podStartE2EDuration="6.380886562s" podCreationTimestamp="2026-01-04 12:02:51 +0000 UTC" firstStartedPulling="2026-01-04 12:02:52.366923342 +0000 UTC m=+871.224110041" lastFinishedPulling="2026-01-04 12:02:56.411160318 +0000 UTC m=+875.268347067" observedRunningTime="2026-01-04 12:02:57.378712545 +0000 UTC m=+876.235899314" watchObservedRunningTime="2026-01-04 12:02:57.380886562 +0000 UTC m=+876.238073311" Jan 04 12:02:57 crc kubenswrapper[4797]: I0104 12:02:57.415058 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" podStartSLOduration=2.077884149 podStartE2EDuration="6.415040249s" podCreationTimestamp="2026-01-04 
12:02:51 +0000 UTC" firstStartedPulling="2026-01-04 12:02:52.037291384 +0000 UTC m=+870.894478093" lastFinishedPulling="2026-01-04 12:02:56.374447444 +0000 UTC m=+875.231634193" observedRunningTime="2026-01-04 12:02:57.413775906 +0000 UTC m=+876.270962655" watchObservedRunningTime="2026-01-04 12:02:57.415040249 +0000 UTC m=+876.272226958" Jan 04 12:03:12 crc kubenswrapper[4797]: I0104 12:03:12.116759 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-fb667c75f-f5h45" Jan 04 12:03:19 crc kubenswrapper[4797]: I0104 12:03:19.492620 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:03:19 crc kubenswrapper[4797]: I0104 12:03:19.493227 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:03:31 crc kubenswrapper[4797]: I0104 12:03:31.731684 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5448cb6b96-v2wrd" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.448026 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-qkjmb"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.459115 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.461519 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.461943 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bpbwv" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.462221 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.483775 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.489373 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.490358 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.496792 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.543868 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8vt7n"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.544919 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.546518 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.546520 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.547098 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hz8wc" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.547148 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.562264 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-r6lt7"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.563363 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.568627 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.582125 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-r6lt7"] Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594334 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8q5p\" (UniqueName: \"kubernetes.io/projected/313f749d-6032-4016-a071-6e5e0fa06d87-kube-api-access-k8q5p\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594382 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-conf\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594457 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594497 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f749d-6032-4016-a071-6e5e0fa06d87-metrics-certs\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594563 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-sockets\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594579 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" 
(UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-metrics\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594608 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/313f749d-6032-4016-a071-6e5e0fa06d87-frr-startup\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594629 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-reloader\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.594680 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht7dt\" (UniqueName: \"kubernetes.io/projected/48107c7b-5bf1-459a-bdbf-ea855afdad7e-kube-api-access-ht7dt\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696193 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zxwt\" (UniqueName: \"kubernetes.io/projected/682a1858-fd65-41a5-aaf2-3def57491fec-kube-api-access-8zxwt\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696238 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-cert\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696284 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-metrics-certs\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696337 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696396 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbgwv\" (UniqueName: \"kubernetes.io/projected/600cb394-290a-4c70-bd38-2e32e170fa8b-kube-api-access-dbgwv\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696429 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8q5p\" 
(UniqueName: \"kubernetes.io/projected/313f749d-6032-4016-a071-6e5e0fa06d87-kube-api-access-k8q5p\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696454 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-conf\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696533 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696552 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f749d-6032-4016-a071-6e5e0fa06d87-metrics-certs\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696591 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-sockets\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696617 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-metrics\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696638 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/313f749d-6032-4016-a071-6e5e0fa06d87-frr-startup\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696666 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-reloader\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696686 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht7dt\" (UniqueName: \"kubernetes.io/projected/48107c7b-5bf1-459a-bdbf-ea855afdad7e-kube-api-access-ht7dt\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc 
kubenswrapper[4797]: I0104 12:03:32.696714 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/682a1858-fd65-41a5-aaf2-3def57491fec-metallb-excludel2\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.696803 4797 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.696864 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert podName:48107c7b-5bf1-459a-bdbf-ea855afdad7e nodeName:}" failed. No retries permitted until 2026-01-04 12:03:33.196849858 +0000 UTC m=+912.054036567 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert") pod "frr-k8s-webhook-server-7784b6fcf-ncm97" (UID: "48107c7b-5bf1-459a-bdbf-ea855afdad7e") : secret "frr-k8s-webhook-server-cert" not found Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.696860 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-conf\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.697011 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-frr-sockets\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.697139 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-reloader\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.697238 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/313f749d-6032-4016-a071-6e5e0fa06d87-metrics\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.697860 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/313f749d-6032-4016-a071-6e5e0fa06d87-frr-startup\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.703166 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f749d-6032-4016-a071-6e5e0fa06d87-metrics-certs\") pod \"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.714317 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8q5p\" (UniqueName: \"kubernetes.io/projected/313f749d-6032-4016-a071-6e5e0fa06d87-kube-api-access-k8q5p\") pod 
\"frr-k8s-qkjmb\" (UID: \"313f749d-6032-4016-a071-6e5e0fa06d87\") " pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.724185 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht7dt\" (UniqueName: \"kubernetes.io/projected/48107c7b-5bf1-459a-bdbf-ea855afdad7e-kube-api-access-ht7dt\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.784056 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.797951 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798085 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/682a1858-fd65-41a5-aaf2-3def57491fec-metallb-excludel2\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.798097 4797 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798120 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zxwt\" (UniqueName: \"kubernetes.io/projected/682a1858-fd65-41a5-aaf2-3def57491fec-kube-api-access-8zxwt\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798139 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-cert\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.798164 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs podName:682a1858-fd65-41a5-aaf2-3def57491fec nodeName:}" failed. No retries permitted until 2026-01-04 12:03:33.298142639 +0000 UTC m=+912.155329348 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs") pod "speaker-8vt7n" (UID: "682a1858-fd65-41a5-aaf2-3def57491fec") : secret "speaker-certs-secret" not found Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798185 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-metrics-certs\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798218 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798256 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbgwv\" (UniqueName: \"kubernetes.io/projected/600cb394-290a-4c70-bd38-2e32e170fa8b-kube-api-access-dbgwv\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.798450 4797 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 04 12:03:32 crc kubenswrapper[4797]: E0104 12:03:32.798481 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist podName:682a1858-fd65-41a5-aaf2-3def57491fec nodeName:}" failed. No retries permitted until 2026-01-04 12:03:33.298472677 +0000 UTC m=+912.155659386 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist") pod "speaker-8vt7n" (UID: "682a1858-fd65-41a5-aaf2-3def57491fec") : secret "metallb-memberlist" not found Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.798868 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/682a1858-fd65-41a5-aaf2-3def57491fec-metallb-excludel2\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.801507 4797 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.802590 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-metrics-certs\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.811441 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/600cb394-290a-4c70-bd38-2e32e170fa8b-cert\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.819672 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbgwv\" (UniqueName: \"kubernetes.io/projected/600cb394-290a-4c70-bd38-2e32e170fa8b-kube-api-access-dbgwv\") pod \"controller-5bddd4b946-r6lt7\" (UID: \"600cb394-290a-4c70-bd38-2e32e170fa8b\") " pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.822686 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zxwt\" (UniqueName: \"kubernetes.io/projected/682a1858-fd65-41a5-aaf2-3def57491fec-kube-api-access-8zxwt\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:32 crc kubenswrapper[4797]: I0104 12:03:32.877576 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.059132 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-r6lt7"] Jan 04 12:03:33 crc kubenswrapper[4797]: W0104 12:03:33.061440 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod600cb394_290a_4c70_bd38_2e32e170fa8b.slice/crio-5766ee23ed73313cd104029705193075248f9514e6695c440659a5bc563cbe04 WatchSource:0}: Error finding container 5766ee23ed73313cd104029705193075248f9514e6695c440659a5bc563cbe04: Status 404 returned error can't find the container with id 5766ee23ed73313cd104029705193075248f9514e6695c440659a5bc563cbe04 Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.202935 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.209482 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/48107c7b-5bf1-459a-bdbf-ea855afdad7e-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ncm97\" (UID: \"48107c7b-5bf1-459a-bdbf-ea855afdad7e\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.304443 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.304558 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.307864 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-metrics-certs\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.308342 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/682a1858-fd65-41a5-aaf2-3def57491fec-memberlist\") pod \"speaker-8vt7n\" (UID: \"682a1858-fd65-41a5-aaf2-3def57491fec\") " pod="metallb-system/speaker-8vt7n" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.412025 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.460218 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8vt7n" Jan 04 12:03:33 crc kubenswrapper[4797]: W0104 12:03:33.500464 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod682a1858_fd65_41a5_aaf2_3def57491fec.slice/crio-f70295025d476b8a8df289ba9e1f43392eb94c8a3627758103c2dc8639ed0e9c WatchSource:0}: Error finding container f70295025d476b8a8df289ba9e1f43392eb94c8a3627758103c2dc8639ed0e9c: Status 404 returned error can't find the container with id f70295025d476b8a8df289ba9e1f43392eb94c8a3627758103c2dc8639ed0e9c Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.584042 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-r6lt7" event={"ID":"600cb394-290a-4c70-bd38-2e32e170fa8b","Type":"ContainerStarted","Data":"632bd39432252c4dd20be5aa8fcb84820b419b2e27925e84196e8b9ca49d0a62"} Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.584331 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-r6lt7" event={"ID":"600cb394-290a-4c70-bd38-2e32e170fa8b","Type":"ContainerStarted","Data":"09c08b5d8d7f887b12ff4791946a3e6518057c6ca4a6d7ded366c297a1e52e3b"} Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.584396 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-r6lt7" event={"ID":"600cb394-290a-4c70-bd38-2e32e170fa8b","Type":"ContainerStarted","Data":"5766ee23ed73313cd104029705193075248f9514e6695c440659a5bc563cbe04"} Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.585309 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-r6lt7" Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.599542 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"2b1e5a52a22d4a14104e30b5ad1b4bcf01de5513307d5d1bf4b0708679821d76"} Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.600775 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8vt7n" event={"ID":"682a1858-fd65-41a5-aaf2-3def57491fec","Type":"ContainerStarted","Data":"f70295025d476b8a8df289ba9e1f43392eb94c8a3627758103c2dc8639ed0e9c"} Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.605139 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97"] Jan 04 12:03:33 crc kubenswrapper[4797]: I0104 12:03:33.607141 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-r6lt7" podStartSLOduration=1.60712107 podStartE2EDuration="1.60712107s" podCreationTimestamp="2026-01-04 12:03:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:03:33.606530715 +0000 UTC m=+912.463717444" watchObservedRunningTime="2026-01-04 12:03:33.60712107 +0000 UTC m=+912.464307779" Jan 04 12:03:33 crc kubenswrapper[4797]: W0104 12:03:33.615710 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48107c7b_5bf1_459a_bdbf_ea855afdad7e.slice/crio-2458efdec7e95c3fac2a60d34793ec8e2124ec3869e6be064ff2874f13c8b48c WatchSource:0}: Error finding container 2458efdec7e95c3fac2a60d34793ec8e2124ec3869e6be064ff2874f13c8b48c: Status 404 returned error can't find the 
container with id 2458efdec7e95c3fac2a60d34793ec8e2124ec3869e6be064ff2874f13c8b48c Jan 04 12:03:34 crc kubenswrapper[4797]: I0104 12:03:34.610343 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" event={"ID":"48107c7b-5bf1-459a-bdbf-ea855afdad7e","Type":"ContainerStarted","Data":"2458efdec7e95c3fac2a60d34793ec8e2124ec3869e6be064ff2874f13c8b48c"} Jan 04 12:03:34 crc kubenswrapper[4797]: I0104 12:03:34.616142 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8vt7n" event={"ID":"682a1858-fd65-41a5-aaf2-3def57491fec","Type":"ContainerStarted","Data":"43ff17f7938d5878a23697544b96234d224ec6ec17244fc4bbf9af7eb5d1f2a2"} Jan 04 12:03:34 crc kubenswrapper[4797]: I0104 12:03:34.616204 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8vt7n" event={"ID":"682a1858-fd65-41a5-aaf2-3def57491fec","Type":"ContainerStarted","Data":"afd972c7269a5cc511e6c12a87eebf960e2e1c67e59da4385c2bab0e92a0305f"} Jan 04 12:03:34 crc kubenswrapper[4797]: I0104 12:03:34.616316 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8vt7n" Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.665866 4797 generic.go:334] "Generic (PLEG): container finished" podID="313f749d-6032-4016-a071-6e5e0fa06d87" containerID="efa34c6b60f9cca27c1c88aa92d4af502e61f9cb8c6c01bf432c4fe23de31d16" exitCode=0 Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.665949 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerDied","Data":"efa34c6b60f9cca27c1c88aa92d4af502e61f9cb8c6c01bf432c4fe23de31d16"} Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.668243 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" event={"ID":"48107c7b-5bf1-459a-bdbf-ea855afdad7e","Type":"ContainerStarted","Data":"c93a1d515789c5f81238de2613125618d0cef419d83dd6fb724595d06d557321"} Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.668469 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.693601 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8vt7n" podStartSLOduration=8.693585531 podStartE2EDuration="8.693585531s" podCreationTimestamp="2026-01-04 12:03:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:03:34.638441091 +0000 UTC m=+913.495627800" watchObservedRunningTime="2026-01-04 12:03:40.693585531 +0000 UTC m=+919.550772250" Jan 04 12:03:40 crc kubenswrapper[4797]: I0104 12:03:40.714615 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97" podStartSLOduration=2.582143963 podStartE2EDuration="8.714596333s" podCreationTimestamp="2026-01-04 12:03:32 +0000 UTC" firstStartedPulling="2026-01-04 12:03:33.618267643 +0000 UTC m=+912.475454342" lastFinishedPulling="2026-01-04 12:03:39.750720013 +0000 UTC m=+918.607906712" observedRunningTime="2026-01-04 12:03:40.710036123 +0000 UTC m=+919.567222832" watchObservedRunningTime="2026-01-04 12:03:40.714596333 +0000 UTC m=+919.571783032" Jan 04 12:03:41 crc kubenswrapper[4797]: I0104 12:03:41.679815 4797 generic.go:334] 
"Generic (PLEG): container finished" podID="313f749d-6032-4016-a071-6e5e0fa06d87" containerID="b883e6ef2aa94f73399ed985bd21a1c192fb54cc22f75c0cd80599cf72474c90" exitCode=0 Jan 04 12:03:41 crc kubenswrapper[4797]: I0104 12:03:41.679933 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerDied","Data":"b883e6ef2aa94f73399ed985bd21a1c192fb54cc22f75c0cd80599cf72474c90"} Jan 04 12:03:42 crc kubenswrapper[4797]: I0104 12:03:42.688447 4797 generic.go:334] "Generic (PLEG): container finished" podID="313f749d-6032-4016-a071-6e5e0fa06d87" containerID="6654945dc8858192b900b3073d38222d88846cffb8987a1d96b126449586b8ed" exitCode=0 Jan 04 12:03:42 crc kubenswrapper[4797]: I0104 12:03:42.688499 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerDied","Data":"6654945dc8858192b900b3073d38222d88846cffb8987a1d96b126449586b8ed"} Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.464346 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8vt7n" Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.697795 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"ffe88af33b5f1634500eae7b02810fcb03cddd5fa8b01ec2a45ee65d120fa2c5"} Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.697846 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"df52aab44ba57eefbd1f439020f2b1888b934d8cecfa5ca244ebc7929571a924"} Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.697861 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"09d2ac78e7c1b9bfd4d234a181c6185ec79fa8732f762e2697f88fdf4e8e7d38"} Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.697872 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"ed93cafaabc1461343c747be6c5c753e3e693f7732fce6301a54db3897dc7c0c"} Jan 04 12:03:43 crc kubenswrapper[4797]: I0104 12:03:43.697883 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"a4974e5665e5c6645807548e4f2dcc27968213c6037dbccf2abd6fd8e8482f10"} Jan 04 12:03:44 crc kubenswrapper[4797]: I0104 12:03:44.710113 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkjmb" event={"ID":"313f749d-6032-4016-a071-6e5e0fa06d87","Type":"ContainerStarted","Data":"070fe98b07185d0b41a374f2cdbd39bfbf170d9c4705d06feb25264416bf7787"} Jan 04 12:03:44 crc kubenswrapper[4797]: I0104 12:03:44.710512 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-qkjmb" Jan 04 12:03:44 crc kubenswrapper[4797]: I0104 12:03:44.743060 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-qkjmb" podStartSLOduration=5.929540123 podStartE2EDuration="12.743038714s" podCreationTimestamp="2026-01-04 12:03:32 +0000 UTC" firstStartedPulling="2026-01-04 12:03:32.930396523 +0000 UTC 
m=+911.787583232" lastFinishedPulling="2026-01-04 12:03:39.743895114 +0000 UTC m=+918.601081823" observedRunningTime="2026-01-04 12:03:44.740508088 +0000 UTC m=+923.597694837" watchObservedRunningTime="2026-01-04 12:03:44.743038714 +0000 UTC m=+923.600225463" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.032403 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"] Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.034360 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.036659 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.042566 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"] Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.188231 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.188423 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qwwf\" (UniqueName: \"kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.188485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.290211 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.290309 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qwwf\" (UniqueName: \"kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.290346 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.291025 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.291056 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.323425 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qwwf\" (UniqueName: \"kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.352069 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"
Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.610369 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"]
Jan 04 12:03:45 crc kubenswrapper[4797]: I0104 12:03:45.724399 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerStarted","Data":"41abd0ffccb1223845ce4b4777bab2dd60de59683727b5db39022faf184d5731"}
Jan 04 12:03:46 crc kubenswrapper[4797]: I0104 12:03:46.731288 4797 generic.go:334] "Generic (PLEG): container finished" podID="01093944-b8c9-40f4-a688-4fcae8488819" containerID="ef2eefc0c2f2a858865ea01f05b1a129e51d1c38538eda2c6abc2304062cc3c5" exitCode=0
Jan 04 12:03:46 crc kubenswrapper[4797]: I0104 12:03:46.731368 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerDied","Data":"ef2eefc0c2f2a858865ea01f05b1a129e51d1c38538eda2c6abc2304062cc3c5"}
Jan 04 12:03:47 crc kubenswrapper[4797]: I0104 12:03:47.784684 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-qkjmb"
Jan 04 12:03:47 crc kubenswrapper[4797]: I0104 12:03:47.824801 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-qkjmb"
Jan 04 12:03:49 crc kubenswrapper[4797]: I0104 12:03:49.492937 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:03:49 crc kubenswrapper[4797]: I0104 12:03:49.493250 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:03:50 crc kubenswrapper[4797]: I0104 12:03:50.760760 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerStarted","Data":"b003735bfb03d68ee41573ecd1ccb482c51b3cd50fe9fef5a0cdcacce2c9c64d"}
Jan 04 12:03:51 crc kubenswrapper[4797]: I0104 12:03:51.778642 4797 generic.go:334] "Generic (PLEG): container finished" podID="01093944-b8c9-40f4-a688-4fcae8488819" containerID="b003735bfb03d68ee41573ecd1ccb482c51b3cd50fe9fef5a0cdcacce2c9c64d" exitCode=0
Jan 04 12:03:51 crc kubenswrapper[4797]: I0104 12:03:51.778725 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerDied","Data":"b003735bfb03d68ee41573ecd1ccb482c51b3cd50fe9fef5a0cdcacce2c9c64d"}
Jan 04 12:03:52 crc kubenswrapper[4797]: I0104 12:03:52.786443 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-qkjmb"
Jan 04 12:03:52 crc kubenswrapper[4797]: I0104 12:03:52.787527 4797 generic.go:334] "Generic (PLEG): container finished" podID="01093944-b8c9-40f4-a688-4fcae8488819" containerID="5f2c38f320296e897fdaa5ec3cb9897ee5063d27eb38ac9f17ca9d7682dc1a1b" exitCode=0
Jan 04 12:03:52 crc kubenswrapper[4797]: I0104 12:03:52.787592 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerDied","Data":"5f2c38f320296e897fdaa5ec3cb9897ee5063d27eb38ac9f17ca9d7682dc1a1b"}
Jan 04 12:03:52 crc kubenswrapper[4797]: I0104 12:03:52.881963 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-r6lt7"
Jan 04 12:03:53 crc kubenswrapper[4797]: I0104 12:03:53.417615 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ncm97"
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.065691 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.120900 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qwwf\" (UniqueName: \"kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf\") pod \"01093944-b8c9-40f4-a688-4fcae8488819\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") "
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.120957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util\") pod \"01093944-b8c9-40f4-a688-4fcae8488819\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") "
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.121137 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle\") pod \"01093944-b8c9-40f4-a688-4fcae8488819\" (UID: \"01093944-b8c9-40f4-a688-4fcae8488819\") "
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.122349 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle" (OuterVolumeSpecName: "bundle") pod "01093944-b8c9-40f4-a688-4fcae8488819" (UID: "01093944-b8c9-40f4-a688-4fcae8488819"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.130188 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf" (OuterVolumeSpecName: "kube-api-access-6qwwf") pod "01093944-b8c9-40f4-a688-4fcae8488819" (UID: "01093944-b8c9-40f4-a688-4fcae8488819"). InnerVolumeSpecName "kube-api-access-6qwwf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.133626 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util" (OuterVolumeSpecName: "util") pod "01093944-b8c9-40f4-a688-4fcae8488819" (UID: "01093944-b8c9-40f4-a688-4fcae8488819"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.222647 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qwwf\" (UniqueName: \"kubernetes.io/projected/01093944-b8c9-40f4-a688-4fcae8488819-kube-api-access-6qwwf\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.222687 4797 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-util\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.222696 4797 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01093944-b8c9-40f4-a688-4fcae8488819-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.803320 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h" event={"ID":"01093944-b8c9-40f4-a688-4fcae8488819","Type":"ContainerDied","Data":"41abd0ffccb1223845ce4b4777bab2dd60de59683727b5db39022faf184d5731"}
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.803382 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41abd0ffccb1223845ce4b4777bab2dd60de59683727b5db39022faf184d5731"
Jan 04 12:03:54 crc kubenswrapper[4797]: I0104 12:03:54.803409 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.603198 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"]
Jan 04 12:03:57 crc kubenswrapper[4797]: E0104 12:03:57.603809 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="util"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.603830 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="util"
Jan 04 12:03:57 crc kubenswrapper[4797]: E0104 12:03:57.603854 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="extract"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.603869 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="extract"
Jan 04 12:03:57 crc kubenswrapper[4797]: E0104 12:03:57.603897 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="pull"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.603910 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="pull"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.604121 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="01093944-b8c9-40f4-a688-4fcae8488819" containerName="extract"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.604753 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.609968 4797 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-r8pg8"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.611083 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.612583 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.627889 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"]
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.670115 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/21ee4ecf-09cf-412e-901f-e489d2460a1d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.670210 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9jrn\" (UniqueName: \"kubernetes.io/projected/21ee4ecf-09cf-412e-901f-e489d2460a1d-kube-api-access-d9jrn\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.771685 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/21ee4ecf-09cf-412e-901f-e489d2460a1d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.771751 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9jrn\" (UniqueName: \"kubernetes.io/projected/21ee4ecf-09cf-412e-901f-e489d2460a1d-kube-api-access-d9jrn\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.772917 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/21ee4ecf-09cf-412e-901f-e489d2460a1d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.800891 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9jrn\" (UniqueName: \"kubernetes.io/projected/21ee4ecf-09cf-412e-901f-e489d2460a1d-kube-api-access-d9jrn\") pod \"cert-manager-operator-controller-manager-64cf6dff88-cnj7n\" (UID: \"21ee4ecf-09cf-412e-901f-e489d2460a1d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:57 crc kubenswrapper[4797]: I0104 12:03:57.919878 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"
Jan 04 12:03:58 crc kubenswrapper[4797]: I0104 12:03:58.401039 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n"]
Jan 04 12:03:58 crc kubenswrapper[4797]: W0104 12:03:58.414783 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21ee4ecf_09cf_412e_901f_e489d2460a1d.slice/crio-dec9d9882499546f1e792b585a0b9f3ab94398ea46fa0cfec7513afcdc8116aa WatchSource:0}: Error finding container dec9d9882499546f1e792b585a0b9f3ab94398ea46fa0cfec7513afcdc8116aa: Status 404 returned error can't find the container with id dec9d9882499546f1e792b585a0b9f3ab94398ea46fa0cfec7513afcdc8116aa
Jan 04 12:03:58 crc kubenswrapper[4797]: I0104 12:03:58.826799 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n" event={"ID":"21ee4ecf-09cf-412e-901f-e489d2460a1d","Type":"ContainerStarted","Data":"dec9d9882499546f1e792b585a0b9f3ab94398ea46fa0cfec7513afcdc8116aa"}
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.572294 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.585514 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.587261 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.686392 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9rl6\" (UniqueName: \"kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.686469 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.686503 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.788483 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9rl6\" (UniqueName: \"kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.788738 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.788837 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.789551 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.789558 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.819422 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9rl6\" (UniqueName: \"kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6\") pod \"redhat-marketplace-sn55l\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") " pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:05 crc kubenswrapper[4797]: I0104 12:04:05.914118 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.468707 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.894669 4797 generic.go:334] "Generic (PLEG): container finished" podID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerID="d0bdc843e72bfbb6bbe8b0bfb2ac223bed20d54b42bfdc2bdc4803a9db2c1781" exitCode=0
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.894754 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerDied","Data":"d0bdc843e72bfbb6bbe8b0bfb2ac223bed20d54b42bfdc2bdc4803a9db2c1781"}
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.895027 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerStarted","Data":"e57264a8705208b7ac6b0a2f54a124c257b37e23116737dd541455e1958244d0"}
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.898780 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n" event={"ID":"21ee4ecf-09cf-412e-901f-e489d2460a1d","Type":"ContainerStarted","Data":"a1c66ba57aab00698f794a99520ebaaccc0a243dc489866bba3418135f3fff52"}
Jan 04 12:04:07 crc kubenswrapper[4797]: I0104 12:04:07.948029 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-cnj7n" podStartSLOduration=2.218543482 podStartE2EDuration="10.948008383s" podCreationTimestamp="2026-01-04 12:03:57 +0000 UTC" firstStartedPulling="2026-01-04 12:03:58.419429432 +0000 UTC m=+937.276616151" lastFinishedPulling="2026-01-04 12:04:07.148894343 +0000 UTC m=+946.006081052" observedRunningTime="2026-01-04 12:04:07.945877377 +0000 UTC m=+946.803064106" watchObservedRunningTime="2026-01-04 12:04:07.948008383 +0000 UTC m=+946.805195112"
Jan 04 12:04:08 crc kubenswrapper[4797]: I0104 12:04:08.912173 4797 generic.go:334] "Generic (PLEG): container finished" podID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerID="8cacfb67c206ab3e8f40b890184c8a3c0bf9136aea56555856476e22463ac7bf" exitCode=0
Jan 04 12:04:08 crc kubenswrapper[4797]: I0104 12:04:08.915373 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerDied","Data":"8cacfb67c206ab3e8f40b890184c8a3c0bf9136aea56555856476e22463ac7bf"}
Jan 04 12:04:09 crc kubenswrapper[4797]: I0104 12:04:09.920603 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerStarted","Data":"1b629dc96dcc76e11b20d4f4d6ab0e503ee34e2d018e0094239e34a4049a857b"}
Jan 04 12:04:09 crc kubenswrapper[4797]: I0104 12:04:09.938733 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sn55l" podStartSLOduration=3.512057741 podStartE2EDuration="4.938713927s" podCreationTimestamp="2026-01-04 12:04:05 +0000 UTC" firstStartedPulling="2026-01-04 12:04:07.898107283 +0000 UTC m=+946.755294032" lastFinishedPulling="2026-01-04 12:04:09.324763509 +0000 UTC m=+948.181950218" observedRunningTime="2026-01-04 12:04:09.938368588 +0000 UTC m=+948.795555307" watchObservedRunningTime="2026-01-04 12:04:09.938713927 +0000 UTC m=+948.795900636"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.611276 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"]
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.613495 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.616721 4797 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-cbnv5"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.616870 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.617037 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.631219 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"]
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.667941 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.668092 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbk56\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-kube-api-access-gbk56\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.769006 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbk56\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-kube-api-access-gbk56\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.769088 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.786764 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbk56\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-kube-api-access-gbk56\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.787427 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8da1d315-7862-496b-bb8d-0b8a56e7ebe8-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-nxlgw\" (UID: \"8da1d315-7862-496b-bb8d-0b8a56e7ebe8\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.930510 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.977670 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.979875 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:11 crc kubenswrapper[4797]: I0104 12:04:11.992151 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.072485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8qb8\" (UniqueName: \"kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.072603 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.072642 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.090323 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"]
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.091925 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.094726 4797 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-5xvf8"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.098451 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"]
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.173970 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.174271 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.174308 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.174352 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lzb2\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-kube-api-access-4lzb2\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.174377 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8qb8\" (UniqueName: \"kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.175142 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.175340 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.192947 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8qb8\" (UniqueName: \"kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8\") pod \"certified-operators-tn89k\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") " pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.275569 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.275644 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lzb2\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-kube-api-access-4lzb2\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.290360 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lzb2\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-kube-api-access-4lzb2\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.290376 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2109ab50-1b14-45a7-a2a9-a415791eddb1-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gml2q\" (UID: \"2109ab50-1b14-45a7-a2a9-a415791eddb1\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.345368 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.405671 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.467525 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"]
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.814090 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:12 crc kubenswrapper[4797]: W0104 12:04:12.816714 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57f776ea_6b7c_45fd_8b41_3a014cbb6bbc.slice/crio-ef94522b8c0fbb897544b48f696526ae7bcac28c53facac222582bf2d5281ea5 WatchSource:0}: Error finding container ef94522b8c0fbb897544b48f696526ae7bcac28c53facac222582bf2d5281ea5: Status 404 returned error can't find the container with id ef94522b8c0fbb897544b48f696526ae7bcac28c53facac222582bf2d5281ea5
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.883130 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gml2q"]
Jan 04 12:04:12 crc kubenswrapper[4797]: W0104 12:04:12.888124 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2109ab50_1b14_45a7_a2a9_a415791eddb1.slice/crio-1995c0dcd7e063bbc09a4e347771300e2e8e25980b2b8c745b40726b50fd31d3 WatchSource:0}: Error finding container 1995c0dcd7e063bbc09a4e347771300e2e8e25980b2b8c745b40726b50fd31d3: Status 404 returned error can't find the container with id 1995c0dcd7e063bbc09a4e347771300e2e8e25980b2b8c745b40726b50fd31d3
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.949885 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q" event={"ID":"2109ab50-1b14-45a7-a2a9-a415791eddb1","Type":"ContainerStarted","Data":"1995c0dcd7e063bbc09a4e347771300e2e8e25980b2b8c745b40726b50fd31d3"}
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.951316 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw" event={"ID":"8da1d315-7862-496b-bb8d-0b8a56e7ebe8","Type":"ContainerStarted","Data":"e862d9f7b0800620832a1a4e043ec07157b442205ddf9d64481f909ffa76b8c4"}
Jan 04 12:04:12 crc kubenswrapper[4797]: I0104 12:04:12.952361 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerStarted","Data":"ef94522b8c0fbb897544b48f696526ae7bcac28c53facac222582bf2d5281ea5"}
Jan 04 12:04:13 crc kubenswrapper[4797]: I0104 12:04:13.964628 4797 generic.go:334] "Generic (PLEG): container finished" podID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerID="5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566" exitCode=0
Jan 04 12:04:13 crc kubenswrapper[4797]: I0104 12:04:13.964850 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerDied","Data":"5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566"}
Jan 04 12:04:15 crc kubenswrapper[4797]: I0104 12:04:15.919720 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:15 crc kubenswrapper[4797]: I0104 12:04:15.920752 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:15 crc kubenswrapper[4797]: I0104 12:04:15.963769 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:15 crc kubenswrapper[4797]: I0104 12:04:15.978878 4797 generic.go:334] "Generic (PLEG): container finished" podID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerID="8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc" exitCode=0
Jan 04 12:04:15 crc kubenswrapper[4797]: I0104 12:04:15.978932 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerDied","Data":"8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc"}
Jan 04 12:04:16 crc kubenswrapper[4797]: I0104 12:04:16.043719 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:16 crc kubenswrapper[4797]: I0104 12:04:16.962538 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:17 crc kubenswrapper[4797]: I0104 12:04:17.990482 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sn55l" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="registry-server" containerID="cri-o://1b629dc96dcc76e11b20d4f4d6ab0e503ee34e2d018e0094239e34a4049a857b" gracePeriod=2
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.003426 4797 generic.go:334] "Generic (PLEG): container finished" podID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerID="1b629dc96dcc76e11b20d4f4d6ab0e503ee34e2d018e0094239e34a4049a857b" exitCode=0
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.003472 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerDied","Data":"1b629dc96dcc76e11b20d4f4d6ab0e503ee34e2d018e0094239e34a4049a857b"}
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.494101 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.494156 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.494196 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.494781 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 12:04:19 crc kubenswrapper[4797]: I0104 12:04:19.494831 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c" gracePeriod=600
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.021331 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c" exitCode=0
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.021399 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c"}
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.021910 4797 scope.go:117] "RemoveContainer" containerID="1f6725f1071a1bad5c4080a18d7f5ff67e458b3d8a6bdba4feddd3d42c63e77f"
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.526283 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.703780 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities\") pod \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") "
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.703920 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9rl6\" (UniqueName: \"kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6\") pod \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") "
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.703957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content\") pod \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\" (UID: \"c4ab2502-862d-43d8-bf7a-434778b1d7b9\") "
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.704850 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities" (OuterVolumeSpecName: "utilities") pod "c4ab2502-862d-43d8-bf7a-434778b1d7b9" (UID: "c4ab2502-862d-43d8-bf7a-434778b1d7b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.716177 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6" (OuterVolumeSpecName: "kube-api-access-b9rl6") pod "c4ab2502-862d-43d8-bf7a-434778b1d7b9" (UID: "c4ab2502-862d-43d8-bf7a-434778b1d7b9"). InnerVolumeSpecName "kube-api-access-b9rl6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.729667 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4ab2502-862d-43d8-bf7a-434778b1d7b9" (UID: "c4ab2502-862d-43d8-bf7a-434778b1d7b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.805447 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.805494 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ab2502-862d-43d8-bf7a-434778b1d7b9-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:21 crc kubenswrapper[4797]: I0104 12:04:21.805508 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9rl6\" (UniqueName: \"kubernetes.io/projected/c4ab2502-862d-43d8-bf7a-434778b1d7b9-kube-api-access-b9rl6\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.030678 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerStarted","Data":"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"}
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.032980 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q" event={"ID":"2109ab50-1b14-45a7-a2a9-a415791eddb1","Type":"ContainerStarted","Data":"337d04e97e1aef1429c7282dec204491a13034b22bbc6167df53f762b57e10ad"}
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.034705 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw" event={"ID":"8da1d315-7862-496b-bb8d-0b8a56e7ebe8","Type":"ContainerStarted","Data":"d5c04000664711e6f0740ffcc4bf9d6f6c0219b09bac3e980181da2c7eea9817"}
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.035117 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.037454 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766"}
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.039824 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sn55l" event={"ID":"c4ab2502-862d-43d8-bf7a-434778b1d7b9","Type":"ContainerDied","Data":"e57264a8705208b7ac6b0a2f54a124c257b37e23116737dd541455e1958244d0"}
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.039892 4797 scope.go:117] "RemoveContainer" containerID="1b629dc96dcc76e11b20d4f4d6ab0e503ee34e2d018e0094239e34a4049a857b"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.039897 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sn55l"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.068284 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tn89k" podStartSLOduration=3.745081733 podStartE2EDuration="11.068249092s" podCreationTimestamp="2026-01-04 12:04:11 +0000 UTC" firstStartedPulling="2026-01-04 12:04:13.967170069 +0000 UTC m=+952.824356778" lastFinishedPulling="2026-01-04 12:04:21.290337418 +0000 UTC m=+960.147524137" observedRunningTime="2026-01-04 12:04:22.056630997 +0000 UTC m=+960.913817746" watchObservedRunningTime="2026-01-04 12:04:22.068249092 +0000 UTC m=+960.925435881"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.073857 4797 scope.go:117] "RemoveContainer" containerID="8cacfb67c206ab3e8f40b890184c8a3c0bf9136aea56555856476e22463ac7bf"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.103201 4797 scope.go:117] "RemoveContainer" containerID="d0bdc843e72bfbb6bbe8b0bfb2ac223bed20d54b42bfdc2bdc4803a9db2c1781"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.115969 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw" podStartSLOduration=2.295885714 podStartE2EDuration="11.115952185s" podCreationTimestamp="2026-01-04 12:04:11 +0000 UTC" firstStartedPulling="2026-01-04 12:04:12.494560035 +0000 UTC m=+951.351746744" lastFinishedPulling="2026-01-04 12:04:21.314626506 +0000 UTC m=+960.171813215" observedRunningTime="2026-01-04 12:04:22.114329292 +0000 UTC m=+960.971516011" watchObservedRunningTime="2026-01-04 12:04:22.115952185 +0000 UTC m=+960.973138904"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.157784 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gml2q" podStartSLOduration=1.703805639 podStartE2EDuration="10.157765983s" podCreationTimestamp="2026-01-04 12:04:12 +0000 UTC" firstStartedPulling="2026-01-04 12:04:12.889465639 +0000 UTC m=+951.746652348" lastFinishedPulling="2026-01-04 12:04:21.343425983 +0000 UTC m=+960.200612692" observedRunningTime="2026-01-04 12:04:22.149774694 +0000 UTC m=+961.006961403" watchObservedRunningTime="2026-01-04 12:04:22.157765983 +0000 UTC m=+961.014952682"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.173877 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.188452 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sn55l"]
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.346051 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:22 crc kubenswrapper[4797]: I0104 12:04:22.346097 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:23 crc kubenswrapper[4797]: I0104 12:04:23.384770 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-tn89k" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="registry-server" probeResult="failure" output=<
Jan 04 12:04:23 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s
Jan 04 12:04:23 crc kubenswrapper[4797]: >
Jan 04 12:04:23 crc kubenswrapper[4797]: I0104 12:04:23.494444 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" path="/var/lib/kubelet/pods/c4ab2502-862d-43d8-bf7a-434778b1d7b9/volumes"
Jan 04 12:04:26 crc kubenswrapper[4797]: I0104 12:04:26.934457 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-nxlgw"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.385910 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bjstj"]
Jan 04 12:04:29 crc kubenswrapper[4797]: E0104 12:04:29.387503 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="extract-utilities"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.387691 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="extract-utilities"
Jan 04 12:04:29 crc kubenswrapper[4797]: E0104 12:04:29.387850 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="registry-server"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.388048 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="registry-server"
Jan 04 12:04:29 crc kubenswrapper[4797]: E0104 12:04:29.388197 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="extract-content"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.388337 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="extract-content"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.388674 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ab2502-862d-43d8-bf7a-434778b1d7b9" containerName="registry-server"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.389577 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.398514 4797 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rddfl"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.407813 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bjstj"]
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.517051 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69b84\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-kube-api-access-69b84\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.517152 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-bound-sa-token\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.618302 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-bound-sa-token\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.618380 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69b84\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-kube-api-access-69b84\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.643694 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-bound-sa-token\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.644131 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69b84\" (UniqueName: \"kubernetes.io/projected/f3175674-5f60-4607-9755-5ee1295171c9-kube-api-access-69b84\") pod \"cert-manager-86cb77c54b-bjstj\" (UID: \"f3175674-5f60-4607-9755-5ee1295171c9\") " pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:29 crc kubenswrapper[4797]: I0104 12:04:29.708768 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-bjstj"
Jan 04 12:04:30 crc kubenswrapper[4797]: I0104 12:04:30.127302 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bjstj"]
Jan 04 12:04:31 crc kubenswrapper[4797]: I0104 12:04:31.116292 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-bjstj" event={"ID":"f3175674-5f60-4607-9755-5ee1295171c9","Type":"ContainerStarted","Data":"63f0fa6fd4be84050aba695a5f62f05c7207ca2141bc879dd8f63c4f09776d79"}
Jan 04 12:04:31 crc kubenswrapper[4797]: I0104 12:04:31.116644 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-bjstj" event={"ID":"f3175674-5f60-4607-9755-5ee1295171c9","Type":"ContainerStarted","Data":"6130a9dc8171161cde321de09ed550be8983cf24f6f742ba8f64840362f24cf2"}
Jan 04 12:04:31 crc kubenswrapper[4797]: I0104 12:04:31.148612 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-bjstj" podStartSLOduration=2.1485797189999998 podStartE2EDuration="2.148579719s" podCreationTimestamp="2026-01-04 12:04:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:04:31.139723347 +0000 UTC m=+969.996910116" watchObservedRunningTime="2026-01-04 12:04:31.148579719 +0000 UTC m=+970.005766458"
Jan 04 12:04:32 crc kubenswrapper[4797]: I0104 12:04:32.404257 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:32 crc kubenswrapper[4797]: I0104 12:04:32.478908 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:32 crc kubenswrapper[4797]: I0104 12:04:32.658583 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:34 crc kubenswrapper[4797]: I0104 12:04:34.138388 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tn89k" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="registry-server" containerID="cri-o://56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a" gracePeriod=2
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.144435 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.158543 4797 generic.go:334] "Generic (PLEG): container finished" podID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerID="56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a" exitCode=0
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.158587 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerDied","Data":"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"}
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.158612 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tn89k" event={"ID":"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc","Type":"ContainerDied","Data":"ef94522b8c0fbb897544b48f696526ae7bcac28c53facac222582bf2d5281ea5"}
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.158611 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tn89k"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.158658 4797 scope.go:117] "RemoveContainer" containerID="56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.200223 4797 scope.go:117] "RemoveContainer" containerID="8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.221743 4797 scope.go:117] "RemoveContainer" containerID="5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.227399 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8qb8\" (UniqueName: \"kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8\") pod \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") "
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.227586 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content\") pod \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") "
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.227672 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities\") pod \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\" (UID: \"57f776ea-6b7c-45fd-8b41-3a014cbb6bbc\") "
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.229219 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities" (OuterVolumeSpecName: "utilities") pod "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" (UID: "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.235976 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8" (OuterVolumeSpecName: "kube-api-access-f8qb8") pod "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" (UID: "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc"). InnerVolumeSpecName "kube-api-access-f8qb8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.270607 4797 scope.go:117] "RemoveContainer" containerID="56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"
Jan 04 12:04:35 crc kubenswrapper[4797]: E0104 12:04:35.271318 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a\": container with ID starting with 56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a not found: ID does not exist" containerID="56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.271376 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a"} err="failed to get container status \"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a\": rpc error: code = NotFound desc = could not find container \"56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a\": container with ID starting with 56c929d1f56885e27eed698d04b807298ec336985800675af907a11d348bad3a not found: ID does not exist"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.271409 4797 scope.go:117] "RemoveContainer" containerID="8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc"
Jan 04 12:04:35 crc kubenswrapper[4797]: E0104 12:04:35.271863 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc\": container with ID starting with 8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc not found: ID does not exist" containerID="8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.271932 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc"} err="failed to get container status \"8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc\": rpc error: code = NotFound desc = could not find container \"8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc\": container with ID starting with 8e24128d5ad07667f400b22e33ecac6853916c82bae9bb784edef160c7fcb1fc not found: ID does not exist"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.271973 4797 scope.go:117] "RemoveContainer" containerID="5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566"
Jan 04 12:04:35 crc kubenswrapper[4797]: E0104 12:04:35.272359 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566\": container with ID starting with 5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566 not found: ID does not exist" containerID="5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.272410 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566"} err="failed to get container status \"5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566\": rpc error: code = NotFound desc = could not find container \"5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566\": container with ID starting with 5c31563c34c3d7db0708b7ea76becb818719261aab6adbc4eca4c4bc63875566 not found: ID does not exist"
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.292853 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" (UID: "57f776ea-6b7c-45fd-8b41-3a014cbb6bbc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.329144 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.329186 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.329207 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8qb8\" (UniqueName: \"kubernetes.io/projected/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc-kube-api-access-f8qb8\") on node \"crc\" DevicePath \"\""
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.506035 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:35 crc kubenswrapper[4797]: I0104 12:04:35.513675 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tn89k"]
Jan 04 12:04:37 crc kubenswrapper[4797]: I0104 12:04:37.488106 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" path="/var/lib/kubelet/pods/57f776ea-6b7c-45fd-8b41-3a014cbb6bbc/volumes"
Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.649754 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"]
Jan 04 12:04:40 crc kubenswrapper[4797]: E0104 12:04:40.650477 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="registry-server"
Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.650492 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="registry-server"
Jan 04 12:04:40 crc kubenswrapper[4797]: E0104 12:04:40.650510 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="extract-utilities"
Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.650520 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="extract-utilities"
Jan 04 12:04:40 crc kubenswrapper[4797]: E0104
12:04:40.650531 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="extract-content" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.650539 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="extract-content" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.650682 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f776ea-6b7c-45fd-8b41-3a014cbb6bbc" containerName="registry-server" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.651183 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.653292 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-f9fwk" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.654017 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.654452 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.661580 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"] Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.806747 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn6sx\" (UniqueName: \"kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx\") pod \"openstack-operator-index-rkc7g\" (UID: \"c0d32c9b-eb41-4790-a814-654fed701d80\") " pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.908326 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn6sx\" (UniqueName: \"kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx\") pod \"openstack-operator-index-rkc7g\" (UID: \"c0d32c9b-eb41-4790-a814-654fed701d80\") " pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.931202 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn6sx\" (UniqueName: \"kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx\") pod \"openstack-operator-index-rkc7g\" (UID: \"c0d32c9b-eb41-4790-a814-654fed701d80\") " pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:40 crc kubenswrapper[4797]: I0104 12:04:40.970480 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:41 crc kubenswrapper[4797]: I0104 12:04:41.409321 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"] Jan 04 12:04:41 crc kubenswrapper[4797]: W0104 12:04:41.414706 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0d32c9b_eb41_4790_a814_654fed701d80.slice/crio-b4571e70e4082b11c478602053661aa03da66dd7c23b3dde4deac27405465b21 WatchSource:0}: Error finding container b4571e70e4082b11c478602053661aa03da66dd7c23b3dde4deac27405465b21: Status 404 returned error can't find the container with id b4571e70e4082b11c478602053661aa03da66dd7c23b3dde4deac27405465b21 Jan 04 12:04:42 crc kubenswrapper[4797]: I0104 12:04:42.226776 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkc7g" event={"ID":"c0d32c9b-eb41-4790-a814-654fed701d80","Type":"ContainerStarted","Data":"b4571e70e4082b11c478602053661aa03da66dd7c23b3dde4deac27405465b21"} Jan 04 12:04:43 crc kubenswrapper[4797]: I0104 12:04:43.240430 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkc7g" event={"ID":"c0d32c9b-eb41-4790-a814-654fed701d80","Type":"ContainerStarted","Data":"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043"} Jan 04 12:04:43 crc kubenswrapper[4797]: I0104 12:04:43.256703 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rkc7g" podStartSLOduration=2.50192851 podStartE2EDuration="3.256676456s" podCreationTimestamp="2026-01-04 12:04:40 +0000 UTC" firstStartedPulling="2026-01-04 12:04:41.416843714 +0000 UTC m=+980.274030433" lastFinishedPulling="2026-01-04 12:04:42.17159167 +0000 UTC m=+981.028778379" observedRunningTime="2026-01-04 12:04:43.255243578 +0000 UTC m=+982.112430327" watchObservedRunningTime="2026-01-04 12:04:43.256676456 +0000 UTC m=+982.113863175" Jan 04 12:04:44 crc kubenswrapper[4797]: I0104 12:04:44.832208 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"] Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.267112 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-rkc7g" podUID="c0d32c9b-eb41-4790-a814-654fed701d80" containerName="registry-server" containerID="cri-o://6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043" gracePeriod=2 Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.645725 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-zz877"] Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.646922 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.654481 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zz877"] Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.689450 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.782366 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn6sx\" (UniqueName: \"kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx\") pod \"c0d32c9b-eb41-4790-a814-654fed701d80\" (UID: \"c0d32c9b-eb41-4790-a814-654fed701d80\") " Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.782650 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bfpk\" (UniqueName: \"kubernetes.io/projected/1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf-kube-api-access-2bfpk\") pod \"openstack-operator-index-zz877\" (UID: \"1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf\") " pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.790841 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx" (OuterVolumeSpecName: "kube-api-access-hn6sx") pod "c0d32c9b-eb41-4790-a814-654fed701d80" (UID: "c0d32c9b-eb41-4790-a814-654fed701d80"). InnerVolumeSpecName "kube-api-access-hn6sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.883681 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bfpk\" (UniqueName: \"kubernetes.io/projected/1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf-kube-api-access-2bfpk\") pod \"openstack-operator-index-zz877\" (UID: \"1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf\") " pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.883785 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn6sx\" (UniqueName: \"kubernetes.io/projected/c0d32c9b-eb41-4790-a814-654fed701d80-kube-api-access-hn6sx\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:45 crc kubenswrapper[4797]: I0104 12:04:45.914604 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bfpk\" (UniqueName: \"kubernetes.io/projected/1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf-kube-api-access-2bfpk\") pod \"openstack-operator-index-zz877\" (UID: \"1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf\") " pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.017205 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.277345 4797 generic.go:334] "Generic (PLEG): container finished" podID="c0d32c9b-eb41-4790-a814-654fed701d80" containerID="6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043" exitCode=0 Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.277390 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkc7g" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.277426 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkc7g" event={"ID":"c0d32c9b-eb41-4790-a814-654fed701d80","Type":"ContainerDied","Data":"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043"} Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.277474 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkc7g" event={"ID":"c0d32c9b-eb41-4790-a814-654fed701d80","Type":"ContainerDied","Data":"b4571e70e4082b11c478602053661aa03da66dd7c23b3dde4deac27405465b21"} Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.277529 4797 scope.go:117] "RemoveContainer" containerID="6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.303598 4797 scope.go:117] "RemoveContainer" containerID="6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043" Jan 04 12:04:46 crc kubenswrapper[4797]: E0104 12:04:46.304389 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043\": container with ID starting with 6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043 not found: ID does not exist" containerID="6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.304439 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043"} err="failed to get container status \"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043\": rpc error: code = NotFound desc = could not find container \"6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043\": container with ID starting with 6464afb902c7b7a15c7e071cd8ff1d920bbfc7c1de722bdd1a1f3361205f0043 not found: ID does not exist" Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.320870 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zz877"] Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.325801 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"] Jan 04 12:04:46 crc kubenswrapper[4797]: I0104 12:04:46.329396 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-rkc7g"] Jan 04 12:04:46 crc kubenswrapper[4797]: W0104 12:04:46.330466 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c7b6fcc_0ea8_47c5_8b9f_cee89e41faaf.slice/crio-df5afd604537821787e45d40f597a0895d9811f2a175d0a8cae8eb5fbe8340d6 WatchSource:0}: Error finding container df5afd604537821787e45d40f597a0895d9811f2a175d0a8cae8eb5fbe8340d6: Status 404 returned error can't find the container with id df5afd604537821787e45d40f597a0895d9811f2a175d0a8cae8eb5fbe8340d6 Jan 04 12:04:47 crc kubenswrapper[4797]: I0104 12:04:47.288277 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zz877" event={"ID":"1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf","Type":"ContainerStarted","Data":"7a4d2660e1d8c6e4983ddabb014a873256a04de7ee1d77ea16be8206d54155f6"} Jan 04 
12:04:47 crc kubenswrapper[4797]: I0104 12:04:47.288360 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zz877" event={"ID":"1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf","Type":"ContainerStarted","Data":"df5afd604537821787e45d40f597a0895d9811f2a175d0a8cae8eb5fbe8340d6"} Jan 04 12:04:47 crc kubenswrapper[4797]: I0104 12:04:47.314573 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-zz877" podStartSLOduration=1.889073442 podStartE2EDuration="2.314540673s" podCreationTimestamp="2026-01-04 12:04:45 +0000 UTC" firstStartedPulling="2026-01-04 12:04:46.335848078 +0000 UTC m=+985.193034787" lastFinishedPulling="2026-01-04 12:04:46.761315279 +0000 UTC m=+985.618502018" observedRunningTime="2026-01-04 12:04:47.309694974 +0000 UTC m=+986.166881713" watchObservedRunningTime="2026-01-04 12:04:47.314540673 +0000 UTC m=+986.171727422" Jan 04 12:04:47 crc kubenswrapper[4797]: I0104 12:04:47.487796 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0d32c9b-eb41-4790-a814-654fed701d80" path="/var/lib/kubelet/pods/c0d32c9b-eb41-4790-a814-654fed701d80/volumes" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.036736 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:04:55 crc kubenswrapper[4797]: E0104 12:04:55.037647 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d32c9b-eb41-4790-a814-654fed701d80" containerName="registry-server" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.037665 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d32c9b-eb41-4790-a814-654fed701d80" containerName="registry-server" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.037890 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0d32c9b-eb41-4790-a814-654fed701d80" containerName="registry-server" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.042821 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.050581 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.128278 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fxvq\" (UniqueName: \"kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.128371 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.128444 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.229848 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fxvq\" (UniqueName: \"kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.230066 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.231587 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.232696 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.232778 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.265065 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7fxvq\" (UniqueName: \"kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq\") pod \"community-operators-9g2ng\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.373588 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:04:55 crc kubenswrapper[4797]: I0104 12:04:55.608189 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:04:55 crc kubenswrapper[4797]: W0104 12:04:55.613390 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddb44fd9_1f0c_42f9_9fd9_10567c7f6580.slice/crio-e57583514e61bb628403c42be2f8c7deb1acb92ee10175a50ee8cfeff831b337 WatchSource:0}: Error finding container e57583514e61bb628403c42be2f8c7deb1acb92ee10175a50ee8cfeff831b337: Status 404 returned error can't find the container with id e57583514e61bb628403c42be2f8c7deb1acb92ee10175a50ee8cfeff831b337 Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.018026 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.018514 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.052264 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.363708 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerStarted","Data":"157d47e185f163342c6eb9ff4c0bbeb118bff5cf6a192fccfe132ecdc7822be9"} Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.363773 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerStarted","Data":"e57583514e61bb628403c42be2f8c7deb1acb92ee10175a50ee8cfeff831b337"} Jan 04 12:04:56 crc kubenswrapper[4797]: I0104 12:04:56.398358 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-zz877" Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.376559 4797 generic.go:334] "Generic (PLEG): container finished" podID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerID="157d47e185f163342c6eb9ff4c0bbeb118bff5cf6a192fccfe132ecdc7822be9" exitCode=0 Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.376659 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerDied","Data":"157d47e185f163342c6eb9ff4c0bbeb118bff5cf6a192fccfe132ecdc7822be9"} Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.887435 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f"] Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.891017 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.894108 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-xrcbt" Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.906509 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f"] Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.975408 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.975474 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:57 crc kubenswrapper[4797]: I0104 12:04:57.975500 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhj5v\" (UniqueName: \"kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.076629 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.077083 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.077154 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhj5v\" (UniqueName: \"kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.077503 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.078238 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.103278 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhj5v\" (UniqueName: \"kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.227154 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.393548 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerStarted","Data":"f32507428af3a8e0e5872763ed4db023530812e19291ea3580d09eb88eb1c2eb"} Jan 04 12:04:58 crc kubenswrapper[4797]: I0104 12:04:58.716887 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f"] Jan 04 12:04:59 crc kubenswrapper[4797]: I0104 12:04:59.403812 4797 generic.go:334] "Generic (PLEG): container finished" podID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerID="296b3cd15edb8e72169522750f7f927974175a7a1da20c883eeee826de55fe90" exitCode=0 Jan 04 12:04:59 crc kubenswrapper[4797]: I0104 12:04:59.403913 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" event={"ID":"2b48df1b-61fb-4eb0-99f9-2c159b667a3b","Type":"ContainerDied","Data":"296b3cd15edb8e72169522750f7f927974175a7a1da20c883eeee826de55fe90"} Jan 04 12:04:59 crc kubenswrapper[4797]: I0104 12:04:59.404057 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" event={"ID":"2b48df1b-61fb-4eb0-99f9-2c159b667a3b","Type":"ContainerStarted","Data":"61c31338593028f2e4fc91c8dbea94594f6c40756999912bc087491e37875ce5"} Jan 04 12:04:59 crc kubenswrapper[4797]: I0104 12:04:59.408260 4797 generic.go:334] "Generic (PLEG): container finished" podID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerID="f32507428af3a8e0e5872763ed4db023530812e19291ea3580d09eb88eb1c2eb" exitCode=0 Jan 04 12:04:59 crc kubenswrapper[4797]: I0104 12:04:59.408312 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerDied","Data":"f32507428af3a8e0e5872763ed4db023530812e19291ea3580d09eb88eb1c2eb"} Jan 04 12:05:00 crc kubenswrapper[4797]: 
I0104 12:05:00.418308 4797 generic.go:334] "Generic (PLEG): container finished" podID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerID="2fb0cf19ead28ae25775c1c0ec6da0e22900cad86cc21ac2048df03d7de28551" exitCode=0 Jan 04 12:05:00 crc kubenswrapper[4797]: I0104 12:05:00.418441 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" event={"ID":"2b48df1b-61fb-4eb0-99f9-2c159b667a3b","Type":"ContainerDied","Data":"2fb0cf19ead28ae25775c1c0ec6da0e22900cad86cc21ac2048df03d7de28551"} Jan 04 12:05:00 crc kubenswrapper[4797]: I0104 12:05:00.422967 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerStarted","Data":"92e465ac86d5865dbe18a7c8363bf94b621e271321be25c283e0ab4486d91065"} Jan 04 12:05:00 crc kubenswrapper[4797]: I0104 12:05:00.474650 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9g2ng" podStartSLOduration=3.00177102 podStartE2EDuration="5.474633438s" podCreationTimestamp="2026-01-04 12:04:55 +0000 UTC" firstStartedPulling="2026-01-04 12:04:57.379614785 +0000 UTC m=+996.236801524" lastFinishedPulling="2026-01-04 12:04:59.852477183 +0000 UTC m=+998.709663942" observedRunningTime="2026-01-04 12:05:00.472127131 +0000 UTC m=+999.329313880" watchObservedRunningTime="2026-01-04 12:05:00.474633438 +0000 UTC m=+999.331820157" Jan 04 12:05:01 crc kubenswrapper[4797]: I0104 12:05:01.429831 4797 generic.go:334] "Generic (PLEG): container finished" podID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerID="d5bf7a555142c53df1c4239741bdeae7cfb675c576034f6c247794b3037ef297" exitCode=0 Jan 04 12:05:01 crc kubenswrapper[4797]: I0104 12:05:01.429880 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" event={"ID":"2b48df1b-61fb-4eb0-99f9-2c159b667a3b","Type":"ContainerDied","Data":"d5bf7a555142c53df1c4239741bdeae7cfb675c576034f6c247794b3037ef297"} Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.805144 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.943147 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhj5v\" (UniqueName: \"kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v\") pod \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.943189 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util\") pod \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.943218 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle\") pod \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\" (UID: \"2b48df1b-61fb-4eb0-99f9-2c159b667a3b\") " Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.944290 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle" (OuterVolumeSpecName: "bundle") pod "2b48df1b-61fb-4eb0-99f9-2c159b667a3b" (UID: "2b48df1b-61fb-4eb0-99f9-2c159b667a3b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.951514 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v" (OuterVolumeSpecName: "kube-api-access-nhj5v") pod "2b48df1b-61fb-4eb0-99f9-2c159b667a3b" (UID: "2b48df1b-61fb-4eb0-99f9-2c159b667a3b"). InnerVolumeSpecName "kube-api-access-nhj5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:05:02 crc kubenswrapper[4797]: I0104 12:05:02.963408 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util" (OuterVolumeSpecName: "util") pod "2b48df1b-61fb-4eb0-99f9-2c159b667a3b" (UID: "2b48df1b-61fb-4eb0-99f9-2c159b667a3b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.044437 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhj5v\" (UniqueName: \"kubernetes.io/projected/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-kube-api-access-nhj5v\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.044500 4797 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-util\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.044531 4797 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b48df1b-61fb-4eb0-99f9-2c159b667a3b-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.449891 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" event={"ID":"2b48df1b-61fb-4eb0-99f9-2c159b667a3b","Type":"ContainerDied","Data":"61c31338593028f2e4fc91c8dbea94594f6c40756999912bc087491e37875ce5"} Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.449964 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61c31338593028f2e4fc91c8dbea94594f6c40756999912bc087491e37875ce5" Jan 04 12:05:03 crc kubenswrapper[4797]: I0104 12:05:03.449977 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.225482 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv"] Jan 04 12:05:05 crc kubenswrapper[4797]: E0104 12:05:05.226175 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="pull" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.226192 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="pull" Jan 04 12:05:05 crc kubenswrapper[4797]: E0104 12:05:05.226210 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="extract" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.226219 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="extract" Jan 04 12:05:05 crc kubenswrapper[4797]: E0104 12:05:05.226239 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="util" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.226247 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="util" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.226574 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b48df1b-61fb-4eb0-99f9-2c159b667a3b" containerName="extract" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.227145 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.231209 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-5c62v" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.250887 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv"] Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.374091 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.374320 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.386676 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stxxj\" (UniqueName: \"kubernetes.io/projected/c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5-kube-api-access-stxxj\") pod \"openstack-operator-controller-operator-6879547b79-tt2dv\" (UID: \"c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.459258 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.487748 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stxxj\" (UniqueName: \"kubernetes.io/projected/c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5-kube-api-access-stxxj\") pod \"openstack-operator-controller-operator-6879547b79-tt2dv\" (UID: \"c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.506180 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.513597 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stxxj\" (UniqueName: \"kubernetes.io/projected/c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5-kube-api-access-stxxj\") pod \"openstack-operator-controller-operator-6879547b79-tt2dv\" (UID: \"c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:05 crc kubenswrapper[4797]: I0104 12:05:05.555334 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:06 crc kubenswrapper[4797]: I0104 12:05:06.016692 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv"] Jan 04 12:05:06 crc kubenswrapper[4797]: I0104 12:05:06.468358 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" event={"ID":"c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5","Type":"ContainerStarted","Data":"020771aa486be32405c6f6d9c2ccc35871c78978a7a33fbcab21febb00258765"} Jan 04 12:05:07 crc kubenswrapper[4797]: I0104 12:05:07.625056 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:05:08 crc kubenswrapper[4797]: I0104 12:05:08.481622 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9g2ng" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="registry-server" containerID="cri-o://92e465ac86d5865dbe18a7c8363bf94b621e271321be25c283e0ab4486d91065" gracePeriod=2 Jan 04 12:05:09 crc kubenswrapper[4797]: I0104 12:05:09.491383 4797 generic.go:334] "Generic (PLEG): container finished" podID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerID="92e465ac86d5865dbe18a7c8363bf94b621e271321be25c283e0ab4486d91065" exitCode=0 Jan 04 12:05:09 crc kubenswrapper[4797]: I0104 12:05:09.491435 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerDied","Data":"92e465ac86d5865dbe18a7c8363bf94b621e271321be25c283e0ab4486d91065"} Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.410148 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.500568 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g2ng" event={"ID":"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580","Type":"ContainerDied","Data":"e57583514e61bb628403c42be2f8c7deb1acb92ee10175a50ee8cfeff831b337"} Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.500623 4797 scope.go:117] "RemoveContainer" containerID="92e465ac86d5865dbe18a7c8363bf94b621e271321be25c283e0ab4486d91065" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.500649 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9g2ng" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.530239 4797 scope.go:117] "RemoveContainer" containerID="f32507428af3a8e0e5872763ed4db023530812e19291ea3580d09eb88eb1c2eb" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.558526 4797 scope.go:117] "RemoveContainer" containerID="157d47e185f163342c6eb9ff4c0bbeb118bff5cf6a192fccfe132ecdc7822be9" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.574901 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities\") pod \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.575382 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content\") pod \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.575652 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fxvq\" (UniqueName: \"kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq\") pod \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\" (UID: \"ddb44fd9-1f0c-42f9-9fd9-10567c7f6580\") " Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.577365 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities" (OuterVolumeSpecName: "utilities") pod "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" (UID: "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.584524 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq" (OuterVolumeSpecName: "kube-api-access-7fxvq") pod "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" (UID: "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580"). InnerVolumeSpecName "kube-api-access-7fxvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.669204 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" (UID: "ddb44fd9-1f0c-42f9-9fd9-10567c7f6580"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.679816 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.679871 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.679909 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fxvq\" (UniqueName: \"kubernetes.io/projected/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580-kube-api-access-7fxvq\") on node \"crc\" DevicePath \"\"" Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.846670 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:05:10 crc kubenswrapper[4797]: I0104 12:05:10.855102 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9g2ng"] Jan 04 12:05:11 crc kubenswrapper[4797]: I0104 12:05:11.482192 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" path="/var/lib/kubelet/pods/ddb44fd9-1f0c-42f9-9fd9-10567c7f6580/volumes" Jan 04 12:05:11 crc kubenswrapper[4797]: I0104 12:05:11.509216 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" event={"ID":"c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5","Type":"ContainerStarted","Data":"327bb1b9d65ecc7b802d998d28c7567c9cbede8235607671522dbc66a871e29f"} Jan 04 12:05:11 crc kubenswrapper[4797]: I0104 12:05:11.509366 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:11 crc kubenswrapper[4797]: I0104 12:05:11.543745 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" podStartSLOduration=2.12770226 podStartE2EDuration="6.543725642s" podCreationTimestamp="2026-01-04 12:05:05 +0000 UTC" firstStartedPulling="2026-01-04 12:05:06.026735578 +0000 UTC m=+1004.883922307" lastFinishedPulling="2026-01-04 12:05:10.44275898 +0000 UTC m=+1009.299945689" observedRunningTime="2026-01-04 12:05:11.541869152 +0000 UTC m=+1010.399055871" watchObservedRunningTime="2026-01-04 12:05:11.543725642 +0000 UTC m=+1010.400912351" Jan 04 12:05:15 crc kubenswrapper[4797]: I0104 12:05:15.559118 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-tt2dv" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.837565 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx"] Jan 04 12:05:33 crc kubenswrapper[4797]: E0104 12:05:33.839022 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="registry-server" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.839044 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="registry-server" Jan 04 12:05:33 crc kubenswrapper[4797]: E0104 
12:05:33.839061 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="extract-utilities" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.839070 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="extract-utilities" Jan 04 12:05:33 crc kubenswrapper[4797]: E0104 12:05:33.839089 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="extract-content" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.839104 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="extract-content" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.839229 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddb44fd9-1f0c-42f9-9fd9-10567c7f6580" containerName="registry-server" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.839947 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.842333 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-lhtn5" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.847969 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.849191 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.851548 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-c79f5" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.854773 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.867378 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.886600 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.887768 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.891148 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-bmgcv" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.894306 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.895835 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.900607 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-x9f2w" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.914363 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.923682 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.955858 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm"] Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.956653 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.962563 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-7q88j" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.970468 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kgkc\" (UniqueName: \"kubernetes.io/projected/b6950105-3c91-45a1-ad35-9871a20ed456-kube-api-access-9kgkc\") pod \"heat-operator-controller-manager-658dd65b86-x8kvm\" (UID: \"b6950105-3c91-45a1-ad35-9871a20ed456\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.970715 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dhwx\" (UniqueName: \"kubernetes.io/projected/7dc88e26-12f5-480c-b774-8512e7356ab9-kube-api-access-8dhwx\") pod \"designate-operator-controller-manager-66f8b87655-mq2pt\" (UID: \"7dc88e26-12f5-480c-b774-8512e7356ab9\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.970805 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6bpl\" (UniqueName: \"kubernetes.io/projected/23c167db-cdda-45e6-a380-d2bcec3278aa-kube-api-access-x6bpl\") pod \"barbican-operator-controller-manager-f6f74d6db-rdrvx\" (UID: \"23c167db-cdda-45e6-a380-d2bcec3278aa\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.970890 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twj55\" (UniqueName: \"kubernetes.io/projected/bb443027-9af5-40c8-b7dd-72ed080799be-kube-api-access-twj55\") pod \"glance-operator-controller-manager-7b549fc966-86pbp\" (UID: \"bb443027-9af5-40c8-b7dd-72ed080799be\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.970965 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8tnw\" (UniqueName: \"kubernetes.io/projected/22873120-9025-46b8-9e9e-8cb0764c199e-kube-api-access-j8tnw\") pod 
\"cinder-operator-controller-manager-78979fc445-6c2kv\" (UID: \"22873120-9025-46b8-9e9e-8cb0764c199e\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:05:33 crc kubenswrapper[4797]: I0104 12:05:33.983400 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.003026 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.004157 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.007021 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-wwkl2" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.014065 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.015133 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.017691 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-ffw4d" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.017876 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.042961 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.043925 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.051738 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-svdvq" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.058112 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.071301 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kgkc\" (UniqueName: \"kubernetes.io/projected/b6950105-3c91-45a1-ad35-9871a20ed456-kube-api-access-9kgkc\") pod \"heat-operator-controller-manager-658dd65b86-x8kvm\" (UID: \"b6950105-3c91-45a1-ad35-9871a20ed456\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.071347 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.075664 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dhwx\" (UniqueName: \"kubernetes.io/projected/7dc88e26-12f5-480c-b774-8512e7356ab9-kube-api-access-8dhwx\") pod \"designate-operator-controller-manager-66f8b87655-mq2pt\" (UID: \"7dc88e26-12f5-480c-b774-8512e7356ab9\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.075938 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwwnl\" (UniqueName: \"kubernetes.io/projected/e0eb23c2-253c-422a-9ad9-736b6a2e7beb-kube-api-access-bwwnl\") pod \"ironic-operator-controller-manager-f99f54bc8-ncp7k\" (UID: \"e0eb23c2-253c-422a-9ad9-736b6a2e7beb\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.076016 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6bpl\" (UniqueName: \"kubernetes.io/projected/23c167db-cdda-45e6-a380-d2bcec3278aa-kube-api-access-x6bpl\") pod \"barbican-operator-controller-manager-f6f74d6db-rdrvx\" (UID: \"23c167db-cdda-45e6-a380-d2bcec3278aa\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.076081 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twj55\" (UniqueName: \"kubernetes.io/projected/bb443027-9af5-40c8-b7dd-72ed080799be-kube-api-access-twj55\") pod \"glance-operator-controller-manager-7b549fc966-86pbp\" (UID: \"bb443027-9af5-40c8-b7dd-72ed080799be\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.076115 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8tnw\" (UniqueName: \"kubernetes.io/projected/22873120-9025-46b8-9e9e-8cb0764c199e-kube-api-access-j8tnw\") pod 
\"cinder-operator-controller-manager-78979fc445-6c2kv\" (UID: \"22873120-9025-46b8-9e9e-8cb0764c199e\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.076155 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdwrj\" (UniqueName: \"kubernetes.io/projected/7806cbd3-d72f-4b26-83b9-1dee8d7d5489-kube-api-access-hdwrj\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-ftvkh\" (UID: \"7806cbd3-d72f-4b26-83b9-1dee8d7d5489\") " pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.076215 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmpsq\" (UniqueName: \"kubernetes.io/projected/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-kube-api-access-xmpsq\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.082347 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.107811 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8tnw\" (UniqueName: \"kubernetes.io/projected/22873120-9025-46b8-9e9e-8cb0764c199e-kube-api-access-j8tnw\") pod \"cinder-operator-controller-manager-78979fc445-6c2kv\" (UID: \"22873120-9025-46b8-9e9e-8cb0764c199e\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.107901 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-pktt5"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.112233 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6bpl\" (UniqueName: \"kubernetes.io/projected/23c167db-cdda-45e6-a380-d2bcec3278aa-kube-api-access-x6bpl\") pod \"barbican-operator-controller-manager-f6f74d6db-rdrvx\" (UID: \"23c167db-cdda-45e6-a380-d2bcec3278aa\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.115432 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.115726 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kgkc\" (UniqueName: \"kubernetes.io/projected/b6950105-3c91-45a1-ad35-9871a20ed456-kube-api-access-9kgkc\") pod \"heat-operator-controller-manager-658dd65b86-x8kvm\" (UID: \"b6950105-3c91-45a1-ad35-9871a20ed456\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.116019 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.125821 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dhwx\" (UniqueName: \"kubernetes.io/projected/7dc88e26-12f5-480c-b774-8512e7356ab9-kube-api-access-8dhwx\") pod \"designate-operator-controller-manager-66f8b87655-mq2pt\" (UID: \"7dc88e26-12f5-480c-b774-8512e7356ab9\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.126417 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twj55\" (UniqueName: \"kubernetes.io/projected/bb443027-9af5-40c8-b7dd-72ed080799be-kube-api-access-twj55\") pod \"glance-operator-controller-manager-7b549fc966-86pbp\" (UID: \"bb443027-9af5-40c8-b7dd-72ed080799be\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.126841 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-jkvkj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.127545 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.131467 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.131690 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.132975 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-kxd9q" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.137056 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-pktt5"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.141902 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.143169 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.148614 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.151165 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4wmp4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.158875 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.159909 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.163757 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.179058 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.181316 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.187920 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-hfx5j" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.221960 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.223870 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs5qj\" (UniqueName: \"kubernetes.io/projected/2d49544b-5665-46d3-8a14-fad6d8ecf7bb-kube-api-access-vs5qj\") pod \"manila-operator-controller-manager-598945d5b8-fnm8x\" (UID: \"2d49544b-5665-46d3-8a14-fad6d8ecf7bb\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.223931 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwwnl\" (UniqueName: \"kubernetes.io/projected/e0eb23c2-253c-422a-9ad9-736b6a2e7beb-kube-api-access-bwwnl\") pod \"ironic-operator-controller-manager-f99f54bc8-ncp7k\" (UID: \"e0eb23c2-253c-422a-9ad9-736b6a2e7beb\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.224043 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdwrj\" (UniqueName: \"kubernetes.io/projected/7806cbd3-d72f-4b26-83b9-1dee8d7d5489-kube-api-access-hdwrj\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-ftvkh\" (UID: \"7806cbd3-d72f-4b26-83b9-1dee8d7d5489\") " 
pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.224093 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmpsq\" (UniqueName: \"kubernetes.io/projected/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-kube-api-access-xmpsq\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.224191 4797 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.224263 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert podName:cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf nodeName:}" failed. No retries permitted until 2026-01-04 12:05:34.724245871 +0000 UTC m=+1033.581432580 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert") pod "infra-operator-controller-manager-6d99759cf-ql65x" (UID: "cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.231831 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.232341 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.244796 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.245710 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.249589 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.253611 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-g94mj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.257432 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdwrj\" (UniqueName: \"kubernetes.io/projected/7806cbd3-d72f-4b26-83b9-1dee8d7d5489-kube-api-access-hdwrj\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-ftvkh\" (UID: \"7806cbd3-d72f-4b26-83b9-1dee8d7d5489\") " pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.257430 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmpsq\" (UniqueName: \"kubernetes.io/projected/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-kube-api-access-xmpsq\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.258124 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.258957 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.262146 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwwnl\" (UniqueName: \"kubernetes.io/projected/e0eb23c2-253c-422a-9ad9-736b6a2e7beb-kube-api-access-bwwnl\") pod \"ironic-operator-controller-manager-f99f54bc8-ncp7k\" (UID: \"e0eb23c2-253c-422a-9ad9-736b6a2e7beb\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.267842 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-dctnj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.282144 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.284578 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.289048 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.290858 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.296038 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-wrgn2" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.299774 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.301232 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.304048 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-9jtk9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.304234 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.313914 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.327560 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.333213 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twgpf\" (UniqueName: \"kubernetes.io/projected/f421383f-618c-4c24-80da-28db8ef0723a-kube-api-access-twgpf\") pod \"mariadb-operator-controller-manager-7b88bfc995-8hq46\" (UID: \"f421383f-618c-4c24-80da-28db8ef0723a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.333477 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.333612 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcw6n\" (UniqueName: \"kubernetes.io/projected/b4d1813a-0643-4fff-9bc6-6f065accb1bc-kube-api-access-tcw6n\") pod \"neutron-operator-controller-manager-7cd87b778f-s7lmf\" (UID: \"b4d1813a-0643-4fff-9bc6-6f065accb1bc\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.333916 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chhqm\" (UniqueName: \"kubernetes.io/projected/73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba-kube-api-access-chhqm\") pod \"ovn-operator-controller-manager-bf6d4f946-zk4v4\" (UID: \"73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.334010 4797 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk4xj\" (UniqueName: \"kubernetes.io/projected/c5ea2cde-563f-4d84-a3cf-8292472baaa1-kube-api-access-wk4xj\") pod \"keystone-operator-controller-manager-568985c78-pktt5\" (UID: \"c5ea2cde-563f-4d84-a3cf-8292472baaa1\") " pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.334113 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz6hw\" (UniqueName: \"kubernetes.io/projected/81235795-0c7e-40b3-bbe3-691d627dc863-kube-api-access-tz6hw\") pod \"octavia-operator-controller-manager-68c649d9d-7bmtj\" (UID: \"81235795-0c7e-40b3-bbe3-691d627dc863\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.334215 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j74f\" (UniqueName: \"kubernetes.io/projected/81b3cb8a-4d8c-4484-a935-54870fd8631d-kube-api-access-4j74f\") pod \"nova-operator-controller-manager-5fbbf8b6cc-sw9g8\" (UID: \"81b3cb8a-4d8c-4484-a935-54870fd8631d\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.334329 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs5qj\" (UniqueName: \"kubernetes.io/projected/2d49544b-5665-46d3-8a14-fad6d8ecf7bb-kube-api-access-vs5qj\") pod \"manila-operator-controller-manager-598945d5b8-fnm8x\" (UID: \"2d49544b-5665-46d3-8a14-fad6d8ecf7bb\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.334407 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fhqv\" (UniqueName: \"kubernetes.io/projected/158d06c2-999b-4a0e-b214-b56a428deeb8-kube-api-access-2fhqv\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.340602 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.341983 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.344287 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-xc7bc" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.354511 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs5qj\" (UniqueName: \"kubernetes.io/projected/2d49544b-5665-46d3-8a14-fad6d8ecf7bb-kube-api-access-vs5qj\") pod \"manila-operator-controller-manager-598945d5b8-fnm8x\" (UID: \"2d49544b-5665-46d3-8a14-fad6d8ecf7bb\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.355824 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.356645 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.358747 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-6xzzp" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.367652 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.369315 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.377485 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.383697 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.391376 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.392239 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.403112 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.410874 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-662lb" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435052 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chhqm\" (UniqueName: \"kubernetes.io/projected/73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba-kube-api-access-chhqm\") pod \"ovn-operator-controller-manager-bf6d4f946-zk4v4\" (UID: \"73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435088 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk4xj\" (UniqueName: \"kubernetes.io/projected/c5ea2cde-563f-4d84-a3cf-8292472baaa1-kube-api-access-wk4xj\") pod \"keystone-operator-controller-manager-568985c78-pktt5\" (UID: \"c5ea2cde-563f-4d84-a3cf-8292472baaa1\") " pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435126 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47h8x\" (UniqueName: \"kubernetes.io/projected/fa826769-776f-42e2-ad58-f528ca756f03-kube-api-access-47h8x\") pod \"placement-operator-controller-manager-9b6f8f78c-5npjh\" (UID: \"fa826769-776f-42e2-ad58-f528ca756f03\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435145 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdn4n\" (UniqueName: \"kubernetes.io/projected/8a0379ed-3206-48b0-8822-61cac55ba4cb-kube-api-access-hdn4n\") pod \"swift-operator-controller-manager-bb586bbf4-wlg4f\" (UID: \"8a0379ed-3206-48b0-8822-61cac55ba4cb\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435163 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz6hw\" (UniqueName: \"kubernetes.io/projected/81235795-0c7e-40b3-bbe3-691d627dc863-kube-api-access-tz6hw\") pod \"octavia-operator-controller-manager-68c649d9d-7bmtj\" (UID: \"81235795-0c7e-40b3-bbe3-691d627dc863\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435181 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j74f\" (UniqueName: \"kubernetes.io/projected/81b3cb8a-4d8c-4484-a935-54870fd8631d-kube-api-access-4j74f\") pod \"nova-operator-controller-manager-5fbbf8b6cc-sw9g8\" (UID: \"81b3cb8a-4d8c-4484-a935-54870fd8631d\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435198 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fhqv\" (UniqueName: 
\"kubernetes.io/projected/158d06c2-999b-4a0e-b214-b56a428deeb8-kube-api-access-2fhqv\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435215 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpp59\" (UniqueName: \"kubernetes.io/projected/0ee7d1ba-194c-4603-887a-0472397bda7c-kube-api-access-fpp59\") pod \"telemetry-operator-controller-manager-68d988df55-bljb9\" (UID: \"0ee7d1ba-194c-4603-887a-0472397bda7c\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435233 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twgpf\" (UniqueName: \"kubernetes.io/projected/f421383f-618c-4c24-80da-28db8ef0723a-kube-api-access-twgpf\") pod \"mariadb-operator-controller-manager-7b88bfc995-8hq46\" (UID: \"f421383f-618c-4c24-80da-28db8ef0723a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435263 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.435287 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcw6n\" (UniqueName: \"kubernetes.io/projected/b4d1813a-0643-4fff-9bc6-6f065accb1bc-kube-api-access-tcw6n\") pod \"neutron-operator-controller-manager-7cd87b778f-s7lmf\" (UID: \"b4d1813a-0643-4fff-9bc6-6f065accb1bc\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.438857 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf"] Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.439572 4797 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.439631 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert podName:158d06c2-999b-4a0e-b214-b56a428deeb8 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:34.939611952 +0000 UTC m=+1033.796798661 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" (UID: "158d06c2-999b-4a0e-b214-b56a428deeb8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.439841 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.450355 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-ms2pm" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.464937 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcw6n\" (UniqueName: \"kubernetes.io/projected/b4d1813a-0643-4fff-9bc6-6f065accb1bc-kube-api-access-tcw6n\") pod \"neutron-operator-controller-manager-7cd87b778f-s7lmf\" (UID: \"b4d1813a-0643-4fff-9bc6-6f065accb1bc\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.468753 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.470624 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz6hw\" (UniqueName: \"kubernetes.io/projected/81235795-0c7e-40b3-bbe3-691d627dc863-kube-api-access-tz6hw\") pod \"octavia-operator-controller-manager-68c649d9d-7bmtj\" (UID: \"81235795-0c7e-40b3-bbe3-691d627dc863\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.471754 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fhqv\" (UniqueName: \"kubernetes.io/projected/158d06c2-999b-4a0e-b214-b56a428deeb8-kube-api-access-2fhqv\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.479125 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j74f\" (UniqueName: \"kubernetes.io/projected/81b3cb8a-4d8c-4484-a935-54870fd8631d-kube-api-access-4j74f\") pod \"nova-operator-controller-manager-5fbbf8b6cc-sw9g8\" (UID: \"81b3cb8a-4d8c-4484-a935-54870fd8631d\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.479872 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk4xj\" (UniqueName: \"kubernetes.io/projected/c5ea2cde-563f-4d84-a3cf-8292472baaa1-kube-api-access-wk4xj\") pod \"keystone-operator-controller-manager-568985c78-pktt5\" (UID: \"c5ea2cde-563f-4d84-a3cf-8292472baaa1\") " pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.482522 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twgpf\" (UniqueName: \"kubernetes.io/projected/f421383f-618c-4c24-80da-28db8ef0723a-kube-api-access-twgpf\") pod \"mariadb-operator-controller-manager-7b88bfc995-8hq46\" (UID: \"f421383f-618c-4c24-80da-28db8ef0723a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.482796 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.484843 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chhqm\" (UniqueName: \"kubernetes.io/projected/73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba-kube-api-access-chhqm\") pod \"ovn-operator-controller-manager-bf6d4f946-zk4v4\" (UID: \"73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.531948 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.532775 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.538205 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47h8x\" (UniqueName: \"kubernetes.io/projected/fa826769-776f-42e2-ad58-f528ca756f03-kube-api-access-47h8x\") pod \"placement-operator-controller-manager-9b6f8f78c-5npjh\" (UID: \"fa826769-776f-42e2-ad58-f528ca756f03\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.538250 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdn4n\" (UniqueName: \"kubernetes.io/projected/8a0379ed-3206-48b0-8822-61cac55ba4cb-kube-api-access-hdn4n\") pod \"swift-operator-controller-manager-bb586bbf4-wlg4f\" (UID: \"8a0379ed-3206-48b0-8822-61cac55ba4cb\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.538287 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpp59\" (UniqueName: \"kubernetes.io/projected/0ee7d1ba-194c-4603-887a-0472397bda7c-kube-api-access-fpp59\") pod \"telemetry-operator-controller-manager-68d988df55-bljb9\" (UID: \"0ee7d1ba-194c-4603-887a-0472397bda7c\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.543113 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-np2g5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.546694 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.578410 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdn4n\" (UniqueName: \"kubernetes.io/projected/8a0379ed-3206-48b0-8822-61cac55ba4cb-kube-api-access-hdn4n\") pod \"swift-operator-controller-manager-bb586bbf4-wlg4f\" (UID: \"8a0379ed-3206-48b0-8822-61cac55ba4cb\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.582532 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.590382 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47h8x\" (UniqueName: \"kubernetes.io/projected/fa826769-776f-42e2-ad58-f528ca756f03-kube-api-access-47h8x\") pod \"placement-operator-controller-manager-9b6f8f78c-5npjh\" (UID: \"fa826769-776f-42e2-ad58-f528ca756f03\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.590931 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.603482 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpp59\" (UniqueName: \"kubernetes.io/projected/0ee7d1ba-194c-4603-887a-0472397bda7c-kube-api-access-fpp59\") pod \"telemetry-operator-controller-manager-68d988df55-bljb9\" (UID: \"0ee7d1ba-194c-4603-887a-0472397bda7c\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.607109 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.618214 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.637851 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.639447 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.640685 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.671392 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.679665 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.679854 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.680092 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-q5vw7" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.697773 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.702405 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.707819 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h4v5\" (UniqueName: \"kubernetes.io/projected/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-kube-api-access-2h4v5\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.707865 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.707970 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.708013 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w99j\" (UniqueName: \"kubernetes.io/projected/5c97e032-2a27-4cd4-bcd7-70d423968689-kube-api-access-6w99j\") pod \"watcher-operator-controller-manager-9dbdf6486-l6drs\" (UID: \"5c97e032-2a27-4cd4-bcd7-70d423968689\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.708079 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5l26\" (UniqueName: \"kubernetes.io/projected/de2f9a17-64b4-4dc6-ab79-9ddc97e1927f-kube-api-access-w5l26\") pod \"test-operator-controller-manager-6c866cfdcb-28htf\" (UID: \"de2f9a17-64b4-4dc6-ab79-9ddc97e1927f\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.735040 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.735914 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.740418 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.741036 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-w8lt6" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.758638 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.764408 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811631 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811683 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w99j\" (UniqueName: \"kubernetes.io/projected/5c97e032-2a27-4cd4-bcd7-70d423968689-kube-api-access-6w99j\") pod \"watcher-operator-controller-manager-9dbdf6486-l6drs\" (UID: \"5c97e032-2a27-4cd4-bcd7-70d423968689\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811724 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811751 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5l26\" (UniqueName: \"kubernetes.io/projected/de2f9a17-64b4-4dc6-ab79-9ddc97e1927f-kube-api-access-w5l26\") pod \"test-operator-controller-manager-6c866cfdcb-28htf\" (UID: \"de2f9a17-64b4-4dc6-ab79-9ddc97e1927f\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811795 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h4v5\" (UniqueName: \"kubernetes.io/projected/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-kube-api-access-2h4v5\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.811814 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.811924 4797 secret.go:188] Couldn't get secret 
openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.811970 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:35.311956676 +0000 UTC m=+1034.169143375 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "metrics-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.812682 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.812710 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:35.312701156 +0000 UTC m=+1034.169887865 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.812872 4797 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: E0104 12:05:34.812894 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert podName:cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf nodeName:}" failed. No retries permitted until 2026-01-04 12:05:35.812887421 +0000 UTC m=+1034.670074130 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert") pod "infra-operator-controller-manager-6d99759cf-ql65x" (UID: "cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.850042 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w99j\" (UniqueName: \"kubernetes.io/projected/5c97e032-2a27-4cd4-bcd7-70d423968689-kube-api-access-6w99j\") pod \"watcher-operator-controller-manager-9dbdf6486-l6drs\" (UID: \"5c97e032-2a27-4cd4-bcd7-70d423968689\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.858328 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5l26\" (UniqueName: \"kubernetes.io/projected/de2f9a17-64b4-4dc6-ab79-9ddc97e1927f-kube-api-access-w5l26\") pod \"test-operator-controller-manager-6c866cfdcb-28htf\" (UID: \"de2f9a17-64b4-4dc6-ab79-9ddc97e1927f\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.865140 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h4v5\" (UniqueName: \"kubernetes.io/projected/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-kube-api-access-2h4v5\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.894693 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.909911 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.912726 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z88w\" (UniqueName: \"kubernetes.io/projected/0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb-kube-api-access-5z88w\") pod \"rabbitmq-cluster-operator-manager-668c99d594-2tb7m\" (UID: \"0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.916214 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv"] Jan 04 12:05:34 crc kubenswrapper[4797]: I0104 12:05:34.950469 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.018192 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z88w\" (UniqueName: \"kubernetes.io/projected/0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb-kube-api-access-5z88w\") pod \"rabbitmq-cluster-operator-manager-668c99d594-2tb7m\" (UID: \"0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.018271 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.018449 4797 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.018507 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert podName:158d06c2-999b-4a0e-b214-b56a428deeb8 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:36.018490372 +0000 UTC m=+1034.875677081 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" (UID: "158d06c2-999b-4a0e-b214-b56a428deeb8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.036102 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z88w\" (UniqueName: \"kubernetes.io/projected/0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb-kube-api-access-5z88w\") pod \"rabbitmq-cluster-operator-manager-668c99d594-2tb7m\" (UID: \"0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.098418 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.108935 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.141051 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.258838 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.267091 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.322973 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.323071 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.323309 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.323386 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:36.323344663 +0000 UTC m=+1035.180531382 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.323855 4797 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.324057 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:36.324041741 +0000 UTC m=+1035.181228450 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "metrics-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.377172 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7806cbd3_d72f_4b26_83b9_1dee8d7d5489.slice/crio-fb01a66baab55f7145842a3a9f5809ef9fa56a4b3bfb5f8344be2296700f7356 WatchSource:0}: Error finding container fb01a66baab55f7145842a3a9f5809ef9fa56a4b3bfb5f8344be2296700f7356: Status 404 returned error can't find the container with id fb01a66baab55f7145842a3a9f5809ef9fa56a4b3bfb5f8344be2296700f7356 Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.413755 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.427355 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x"] Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.428151 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0eb23c2_253c_422a_9ad9_736b6a2e7beb.slice/crio-79088abfc2a8064827cead4765f0354489e5afb266c323afe88ab3ac076ab6ee WatchSource:0}: Error finding container 79088abfc2a8064827cead4765f0354489e5afb266c323afe88ab3ac076ab6ee: Status 404 returned error can't find the container with id 79088abfc2a8064827cead4765f0354489e5afb266c323afe88ab3ac076ab6ee Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.432932 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81235795_0c7e_40b3_bbe3_691d627dc863.slice/crio-cd8974170b802694789d5748795b89cf43c80a6299f5ec9d9fb9319217135730 WatchSource:0}: Error finding container cd8974170b802694789d5748795b89cf43c80a6299f5ec9d9fb9319217135730: Status 404 returned error can't find the container with id cd8974170b802694789d5748795b89cf43c80a6299f5ec9d9fb9319217135730 Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.434570 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.670826 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.678918 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8"] Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.682136 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81b3cb8a_4d8c_4484_a935_54870fd8631d.slice/crio-d6a0af52661907dffa5c58cbfdbaa406b83e933aba577d4c9ba1a826af9d39fa WatchSource:0}: Error finding container d6a0af52661907dffa5c58cbfdbaa406b83e933aba577d4c9ba1a826af9d39fa: Status 404 returned error can't find the container with id d6a0af52661907dffa5c58cbfdbaa406b83e933aba577d4c9ba1a826af9d39fa Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.685502 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.735490 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" event={"ID":"7806cbd3-d72f-4b26-83b9-1dee8d7d5489","Type":"ContainerStarted","Data":"fb01a66baab55f7145842a3a9f5809ef9fa56a4b3bfb5f8344be2296700f7356"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.742156 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" event={"ID":"81235795-0c7e-40b3-bbe3-691d627dc863","Type":"ContainerStarted","Data":"cd8974170b802694789d5748795b89cf43c80a6299f5ec9d9fb9319217135730"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.744181 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" event={"ID":"7dc88e26-12f5-480c-b774-8512e7356ab9","Type":"ContainerStarted","Data":"dc0a3589028963c249a28d40f81de7a776d6ea5af3b2cc2e23b97323fd12e693"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.745460 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" event={"ID":"23c167db-cdda-45e6-a380-d2bcec3278aa","Type":"ContainerStarted","Data":"d06f93648ae88ce6e3b157adf66cdadddd772061abc6be96586794c8ff46bd5b"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.758496 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" event={"ID":"bb443027-9af5-40c8-b7dd-72ed080799be","Type":"ContainerStarted","Data":"3249570b820dd919aa795a190205fe57c306750c1dbc8303d7c3075b6dd778a8"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.762225 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.769301 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.774326 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" event={"ID":"2d49544b-5665-46d3-8a14-fad6d8ecf7bb","Type":"ContainerStarted","Data":"6b83e59723e54ca84431cc5e1811b930228195b9fcb6ad02844d82427e5459ca"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.774380 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.778840 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" event={"ID":"e0eb23c2-253c-422a-9ad9-736b6a2e7beb","Type":"ContainerStarted","Data":"79088abfc2a8064827cead4765f0354489e5afb266c323afe88ab3ac076ab6ee"} Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.782343 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a0379ed_3206_48b0_8822_61cac55ba4cb.slice/crio-31eca2a8b46f18da1926f7f15154940723394e0dcd27c97cb86ba88e1d868581 WatchSource:0}: Error finding container 31eca2a8b46f18da1926f7f15154940723394e0dcd27c97cb86ba88e1d868581: Status 404 returned error 
can't find the container with id 31eca2a8b46f18da1926f7f15154940723394e0dcd27c97cb86ba88e1d868581 Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.783540 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5ea2cde_563f_4d84_a3cf_8292472baaa1.slice/crio-11dbae3b783864e4ee2f3d33cc7b4fe176d5de4f0d174c5003e599db710edab7 WatchSource:0}: Error finding container 11dbae3b783864e4ee2f3d33cc7b4fe176d5de4f0d174c5003e599db710edab7: Status 404 returned error can't find the container with id 11dbae3b783864e4ee2f3d33cc7b4fe176d5de4f0d174c5003e599db710edab7 Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.787381 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" event={"ID":"fa826769-776f-42e2-ad58-f528ca756f03","Type":"ContainerStarted","Data":"6c1a26ed36599bad0ff1975f5850d1ee42bef915d8a0f27b1102c2098d51b16e"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.790673 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" event={"ID":"22873120-9025-46b8-9e9e-8cb0764c199e","Type":"ContainerStarted","Data":"3cd357e69340d16f6dbfe53fc73f0a69a475ff2803bf83dfe953f73dfe7d6d06"} Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.790708 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-pktt5"] Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.810625 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tcw6n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-7cd87b778f-s7lmf_openstack-operators(b4d1813a-0643-4fff-9bc6-6f065accb1bc): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.810822 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" event={"ID":"f421383f-618c-4c24-80da-28db8ef0723a","Type":"ContainerStarted","Data":"93fb2744a37fb3defcf96418df39ecc5cd24419c76ad814b68aca6b2e659441c"} Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.812473 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" podUID="b4d1813a-0643-4fff-9bc6-6f065accb1bc" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.814243 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" event={"ID":"81b3cb8a-4d8c-4484-a935-54870fd8631d","Type":"ContainerStarted","Data":"d6a0af52661907dffa5c58cbfdbaa406b83e933aba577d4c9ba1a826af9d39fa"} Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.808878 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fpp59,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-68d988df55-bljb9_openstack-operators(0ee7d1ba-194c-4603-887a-0472397bda7c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.820382 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" event={"ID":"b6950105-3c91-45a1-ad35-9871a20ed456","Type":"ContainerStarted","Data":"2ab763d9606f56f0f8128da0b00c11646a192adbe3b972c6845650fd00b9f85b"} Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.822233 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" podUID="0ee7d1ba-194c-4603-887a-0472397bda7c" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.825590 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-chhqm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-bf6d4f946-zk4v4_openstack-operators(73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.826686 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podUID="73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.833917 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.837360 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.837520 4797 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.837650 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert podName:cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf nodeName:}" failed. No retries permitted until 2026-01-04 12:05:37.837571526 +0000 UTC m=+1036.694758235 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert") pod "infra-operator-controller-manager-6d99759cf-ql65x" (UID: "cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.913778 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m"] Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.919352 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf"] Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.928496 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w5l26,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6c866cfdcb-28htf_openstack-operators(de2f9a17-64b4-4dc6-ab79-9ddc97e1927f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.929609 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" podUID="de2f9a17-64b4-4dc6-ab79-9ddc97e1927f" 
Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.934235 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5z88w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-2tb7m_openstack-operators(0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.935792 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podUID="0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb" Jan 04 12:05:35 crc kubenswrapper[4797]: I0104 12:05:35.937471 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs"] Jan 04 12:05:35 crc kubenswrapper[4797]: W0104 12:05:35.946892 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c97e032_2a27_4cd4_bcd7_70d423968689.slice/crio-859d48fa7051974a43c70de50b35e7aa804aa4252cb2a3a05016491be216da70 WatchSource:0}: Error finding container 859d48fa7051974a43c70de50b35e7aa804aa4252cb2a3a05016491be216da70: Status 404 returned error can't find the container with id 859d48fa7051974a43c70de50b35e7aa804aa4252cb2a3a05016491be216da70 Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.952739 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6w99j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-9dbdf6486-l6drs_openstack-operators(5c97e032-2a27-4cd4-bcd7-70d423968689): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:35 crc kubenswrapper[4797]: E0104 12:05:35.954052 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podUID="5c97e032-2a27-4cd4-bcd7-70d423968689" Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.040962 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.041212 4797 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.041317 4797 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert podName:158d06c2-999b-4a0e-b214-b56a428deeb8 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:38.041295496 +0000 UTC m=+1036.898482205 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" (UID: "158d06c2-999b-4a0e-b214-b56a428deeb8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.354419 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.354520 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.354636 4797 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.354685 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:38.354668845 +0000 UTC m=+1037.211855544 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "metrics-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.354742 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.354810 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:38.354794148 +0000 UTC m=+1037.211980857 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.852367 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" event={"ID":"0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb","Type":"ContainerStarted","Data":"71757f59ed12432d8e644f4928dfba4a63e8251c9711d753c4f54d8623f80605"} Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.856314 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podUID="0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb" Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.856433 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" event={"ID":"c5ea2cde-563f-4d84-a3cf-8292472baaa1","Type":"ContainerStarted","Data":"11dbae3b783864e4ee2f3d33cc7b4fe176d5de4f0d174c5003e599db710edab7"} Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.858796 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" event={"ID":"8a0379ed-3206-48b0-8822-61cac55ba4cb","Type":"ContainerStarted","Data":"31eca2a8b46f18da1926f7f15154940723394e0dcd27c97cb86ba88e1d868581"} Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.861338 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" event={"ID":"de2f9a17-64b4-4dc6-ab79-9ddc97e1927f","Type":"ContainerStarted","Data":"75dc722020e6eb24bb54af4307b408b77644cd093e95677ba26a940a65d97b6a"} Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.865730 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" event={"ID":"73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba","Type":"ContainerStarted","Data":"2ab7878ca1a673680102efb2047a050b78b4db256c1112082273d5051872c255"} Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.866144 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6\\\"\"" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" podUID="de2f9a17-64b4-4dc6-ab79-9ddc97e1927f" Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.866825 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podUID="73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba" Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.868599 4797 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" event={"ID":"5c97e032-2a27-4cd4-bcd7-70d423968689","Type":"ContainerStarted","Data":"859d48fa7051974a43c70de50b35e7aa804aa4252cb2a3a05016491be216da70"} Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.870055 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podUID="5c97e032-2a27-4cd4-bcd7-70d423968689" Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.870460 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" event={"ID":"0ee7d1ba-194c-4603-887a-0472397bda7c","Type":"ContainerStarted","Data":"e5a0f1fb81871c1c4acf03720fe1ab4b001f87946acccde1246660caa573cd87"} Jan 04 12:05:36 crc kubenswrapper[4797]: I0104 12:05:36.871867 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" event={"ID":"b4d1813a-0643-4fff-9bc6-6f065accb1bc","Type":"ContainerStarted","Data":"dec32aa102bba0e8d4089429d23214a9561e013bf83ff39557cd6847bc49f7e2"} Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.875941 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" podUID="0ee7d1ba-194c-4603-887a-0472397bda7c" Jan 04 12:05:36 crc kubenswrapper[4797]: E0104 12:05:36.878409 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" podUID="b4d1813a-0643-4fff-9bc6-6f065accb1bc" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.877932 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" podUID="0ee7d1ba-194c-4603-887a-0472397bda7c" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878006 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" podUID="b4d1813a-0643-4fff-9bc6-6f065accb1bc" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878219 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6\\\"\"" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" podUID="de2f9a17-64b4-4dc6-ab79-9ddc97e1927f" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878265 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podUID="73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878300 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podUID="0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878774 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podUID="5c97e032-2a27-4cd4-bcd7-70d423968689" Jan 04 12:05:37 crc kubenswrapper[4797]: I0104 12:05:37.878834 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878943 4797 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:37 crc kubenswrapper[4797]: E0104 12:05:37.878977 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert podName:cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf nodeName:}" failed. No retries permitted until 2026-01-04 12:05:41.878963541 +0000 UTC m=+1040.736150240 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert") pod "infra-operator-controller-manager-6d99759cf-ql65x" (UID: "cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf") : secret "infra-operator-webhook-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: I0104 12:05:38.083171 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.083365 4797 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.083448 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert podName:158d06c2-999b-4a0e-b214-b56a428deeb8 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.083430591 +0000 UTC m=+1040.940617300 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" (UID: "158d06c2-999b-4a0e-b214-b56a428deeb8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: I0104 12:05:38.394034 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:38 crc kubenswrapper[4797]: I0104 12:05:38.394141 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.394223 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.394265 4797 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.394295 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.394277133 +0000 UTC m=+1041.251463842 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found
Jan 04 12:05:38 crc kubenswrapper[4797]: E0104 12:05:38.394311 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.394305444 +0000 UTC m=+1041.251492153 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "metrics-server-cert" not found
Jan 04 12:05:41 crc kubenswrapper[4797]: I0104 12:05:41.947500 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:05:41 crc kubenswrapper[4797]: E0104 12:05:41.947651 4797 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 04 12:05:41 crc kubenswrapper[4797]: E0104 12:05:41.947967 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert podName:cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf nodeName:}" failed. No retries permitted until 2026-01-04 12:05:49.947941684 +0000 UTC m=+1048.805128403 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert") pod "infra-operator-controller-manager-6d99759cf-ql65x" (UID: "cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf") : secret "infra-operator-webhook-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: I0104 12:05:42.151356 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.151581 4797 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.151721 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert podName:158d06c2-999b-4a0e-b214-b56a428deeb8 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:50.151692376 +0000 UTC m=+1049.008879115 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" (UID: "158d06c2-999b-4a0e-b214-b56a428deeb8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: I0104 12:05:42.459526 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:42 crc kubenswrapper[4797]: I0104 12:05:42.459647 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.459700 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.459788 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:50.459766023 +0000 UTC m=+1049.316952732 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.459793 4797 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 04 12:05:42 crc kubenswrapper[4797]: E0104 12:05:42.459845 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:50.459830975 +0000 UTC m=+1049.317017684 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "metrics-server-cert" not found
Jan 04 12:05:49 crc kubenswrapper[4797]: I0104 12:05:49.989332 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:05:49 crc kubenswrapper[4797]: I0104 12:05:49.996338 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf-cert\") pod \"infra-operator-controller-manager-6d99759cf-ql65x\" (UID: \"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.193115 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.197789 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/158d06c2-999b-4a0e-b214-b56a428deeb8-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88\" (UID: \"158d06c2-999b-4a0e-b214-b56a428deeb8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.244118 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.246525 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
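The secret.go:188 failures above show the kubelet repeatedly probing for cert Secrets that do not exist yet; the mounts succeed at 12:05:49-12:05:50 once the Secrets have been created. When auditing a run like this after the fact, the same lookup can be reproduced with client-go. A minimal sketch, assuming a reachable kubeconfig (the path below is hypothetical; the namespace and Secret names are copied from the log):

package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig path; any config that reaches the CRC cluster works.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Secret names copied from the "Couldn't get secret" lines above.
	names := []string{
		"infra-operator-webhook-server-cert",
		"openstack-baremetal-operator-webhook-server-cert",
		"webhook-server-cert",
		"metrics-server-cert",
	}
	for _, name := range names {
		_, err := client.CoreV1().Secrets("openstack-operators").Get(context.TODO(), name, metav1.GetOptions{})
		switch {
		case apierrors.IsNotFound(err):
			fmt.Printf("secret %q not found (mount would fail)\n", name)
		case err != nil:
			fmt.Printf("error fetching %q: %v\n", name, err)
		default:
			fmt.Printf("secret %q exists (mount can proceed)\n", name)
		}
	}
}

apierrors.IsNotFound distinguishes a genuinely missing Secret, as in the log above, from RBAC or connectivity errors that would also fail the Get.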
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.497875 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.497974 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:50 crc kubenswrapper[4797]: E0104 12:05:50.498168 4797 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 04 12:05:50 crc kubenswrapper[4797]: E0104 12:05:50.498235 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs podName:cf7b4084-7aaf-42c6-9cdc-656863de1ed7 nodeName:}" failed. No retries permitted until 2026-01-04 12:06:06.498215762 +0000 UTC m=+1065.355402471 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-97vck" (UID: "cf7b4084-7aaf-42c6-9cdc-656863de1ed7") : secret "webhook-server-cert" not found
Jan 04 12:05:50 crc kubenswrapper[4797]: I0104 12:05:50.501458 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.346009 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.346435 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hdn4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bb586bbf4-wlg4f_openstack-operators(8a0379ed-3206-48b0-8822-61cac55ba4cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.347773 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" podUID="8a0379ed-3206-48b0-8822-61cac55ba4cb"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.927835 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.928075 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4j74f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5fbbf8b6cc-sw9g8_openstack-operators(81b3cb8a-4d8c-4484-a935-54870fd8631d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.929280 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" podUID="81b3cb8a-4d8c-4484-a935-54870fd8631d"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.996535 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" podUID="8a0379ed-3206-48b0-8822-61cac55ba4cb"
Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.997097 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" podUID="81b3cb8a-4d8c-4484-a935-54870fd8631d"
Jan 04 12:05:52 crc kubenswrapper[4797]: E0104 12:05:52.492779 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c"
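Note the durationBeforeRetry progression in the nestedpendingoperations.go:348 lines above: 4s, then 8s, then 16s for the same volumes, with each retry window stated up front ("No retries permitted until ..."). A sketch of that doubling backoff pattern (illustrative only, not kubelet's actual code; the 4s base is read off the log, and the 5-minute cap is an assumption for the example):

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the retry delay on each consecutive failure, up to a cap.
// The 4s start and the doubling mirror the durationBeforeRetry sequence in the
// log (4s -> 8s -> 16s); the cap is an assumption, not a value from the log.
func nextBackoff(prev time.Duration) time.Duration {
	const (
		initial  = 4 * time.Second
		maxDelay = 5 * time.Minute
	)
	if prev <= 0 {
		return initial
	}
	next := prev * 2
	if next > maxDelay {
		next = maxDelay
	}
	return next
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 6; i++ {
		d = nextBackoff(d)
		fmt.Printf("retry %d after %v\n", i+1, d) // 4s, 8s, 16s, 32s, ...
	}
}

The same shape shows up for image pulls below: an immediate ErrImagePull on a failed attempt, then ImagePullBackOff ("Back-off pulling image ...") while the kubelet waits out the delay between attempts.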
Jan 04 12:05:52 crc kubenswrapper[4797]: E0104 12:05:52.492980 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wk4xj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-568985c78-pktt5_openstack-operators(c5ea2cde-563f-4d84-a3cf-8292472baaa1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:05:52 crc kubenswrapper[4797]: E0104 12:05:52.494207 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" podUID="c5ea2cde-563f-4d84-a3cf-8292472baaa1"
Jan 04 12:05:53 crc kubenswrapper[4797]: E0104 12:05:53.002301 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" podUID="c5ea2cde-563f-4d84-a3cf-8292472baaa1"
Jan 04 12:06:03 crc kubenswrapper[4797]: E0104 12:06:03.217250 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a"
Jan 04 12:06:03 crc kubenswrapper[4797]: E0104 12:06:03.217818 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6w99j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-9dbdf6486-l6drs_openstack-operators(5c97e032-2a27-4cd4-bcd7-70d423968689): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:06:03 crc kubenswrapper[4797]: E0104 12:06:03.218922 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podUID="5c97e032-2a27-4cd4-bcd7-70d423968689"
Jan 04 12:06:03 crc kubenswrapper[4797]: I0104 12:06:03.660202 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"]
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.078831 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59"
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.079090 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-chhqm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-bf6d4f946-zk4v4_openstack-operators(73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.080519 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podUID="73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba"
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.519841 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.520223 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5z88w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-2tb7m_openstack-operators(0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:06:04 crc kubenswrapper[4797]: E0104 12:06:04.521477 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podUID="0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb"
Jan 04 12:06:04 crc kubenswrapper[4797]: I0104 12:06:04.733560 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"]
Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.102307 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" event={"ID":"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf","Type":"ContainerStarted","Data":"78c44bc7e628d8a22796373b700708f26cd66462b5e5fb75641ce7983b19de3f"}
Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.109252 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" event={"ID":"22873120-9025-46b8-9e9e-8cb0764c199e","Type":"ContainerStarted","Data":"b78edfa06b1fac89b358c5d072c345f1f8f207afa21142296d45dc4cc3d52f25"}
Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.109931 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv"
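Each entry here is a journald prefix ("Jan 04 12:05:51 crc kubenswrapper[4797]:") followed by a klog header: severity letter, MMDD, wall-clock time, thread id, and the file:line of the caller. When slicing a log like this by severity or by source file, a small parser helps; a best-effort sketch for the exact framing visible above (the regex is an assumption derived from these lines, not a spec):

package main

import (
	"fmt"
	"regexp"
)

// Matches the journald + klog framing of the kubenswrapper lines above, e.g.
// "Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.346009 4797 log.go:32] message..."
var lineRE = regexp.MustCompile(
	`^(\w{3} \d{2} [\d:]{8}) (\S+) (\w+)\[(\d+)\]: ([IWEF])(\d{4}) ([\d:.]+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `Jan 04 12:05:51 crc kubenswrapper[4797]: E0104 12:05:51.346009 4797 log.go:32] "PullImage from image service failed"`
	m := lineRE.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Println("severity:", m[5])  // E (error); I/W/F for info/warning/fatal
	fmt.Println("source:  ", m[9])  // log.go:32
	fmt.Println("message: ", m[10]) // everything after the "]" delimiter
}

Filtering on m[5] == "E" over this section would surface exactly the secret-mount and image-pull failures; filtering on m[9] isolates a single component such as nestedpendingoperations.go:348.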
pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.113110 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" event={"ID":"81235795-0c7e-40b3-bbe3-691d627dc863","Type":"ContainerStarted","Data":"23d83d2798818a7d7fb104a3dba049ac920e5b6b4aaeef176c3fd8a6029c561d"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.113481 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.114709 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" event={"ID":"23c167db-cdda-45e6-a380-d2bcec3278aa","Type":"ContainerStarted","Data":"2dbea0fdbd475831691987b2b32696a2ec1289ae0e847ab962a383f601f5476a"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.115065 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.115966 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" event={"ID":"bb443027-9af5-40c8-b7dd-72ed080799be","Type":"ContainerStarted","Data":"8665c8badb38a1b2814e96c442badcf5a9e7a775ab5971af65f04cdf882e4349"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.116304 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.128460 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" event={"ID":"de2f9a17-64b4-4dc6-ab79-9ddc97e1927f","Type":"ContainerStarted","Data":"75dff3a8cd5a3f9fae347a99908d53f19681b7f9c292864661ce3081a10d0db0"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.129037 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.133103 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv" podStartSLOduration=15.234827238 podStartE2EDuration="32.13309113s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.007599021 +0000 UTC m=+1033.864785730" lastFinishedPulling="2026-01-04 12:05:51.905862913 +0000 UTC m=+1050.763049622" observedRunningTime="2026-01-04 12:06:05.129981187 +0000 UTC m=+1063.987167896" watchObservedRunningTime="2026-01-04 12:06:05.13309113 +0000 UTC m=+1063.990277839" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.138908 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" event={"ID":"0ee7d1ba-194c-4603-887a-0472397bda7c","Type":"ContainerStarted","Data":"91f32f5cf298955b6c72e4b81137cd5abc4fa3074fa8d6321358751da2202c3a"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.139469 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" Jan 04 12:06:05 crc 
kubenswrapper[4797]: I0104 12:06:05.157766 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj" podStartSLOduration=7.985571321 podStartE2EDuration="31.157750579s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.4355745 +0000 UTC m=+1034.292761209" lastFinishedPulling="2026-01-04 12:05:58.607753748 +0000 UTC m=+1057.464940467" observedRunningTime="2026-01-04 12:06:05.153981148 +0000 UTC m=+1064.011167847" watchObservedRunningTime="2026-01-04 12:06:05.157750579 +0000 UTC m=+1064.014937288" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.184091 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" event={"ID":"7dc88e26-12f5-480c-b774-8512e7356ab9","Type":"ContainerStarted","Data":"5a1f1817f63941ba5eb7d11300be72ad3fc2a27a3b58ed654714d6c84612a1a7"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.184296 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx" podStartSLOduration=14.68404375 podStartE2EDuration="32.184282238s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:34.968408384 +0000 UTC m=+1033.825595093" lastFinishedPulling="2026-01-04 12:05:52.468646872 +0000 UTC m=+1051.325833581" observedRunningTime="2026-01-04 12:06:05.182567722 +0000 UTC m=+1064.039754431" watchObservedRunningTime="2026-01-04 12:06:05.184282238 +0000 UTC m=+1064.041468947" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.184728 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.208203 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" event={"ID":"f421383f-618c-4c24-80da-28db8ef0723a","Type":"ContainerStarted","Data":"3bd244c17135c77b32379cdfb66a9bc800895f9c723cc1fcd7a57c3837b49ca9"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.208816 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.220468 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" event={"ID":"7806cbd3-d72f-4b26-83b9-1dee8d7d5489","Type":"ContainerStarted","Data":"09a0471cb6764aea26d6d329395cbe9e7d1ac06c9fdf91b69af972e2ed71cda4"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.221698 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.228277 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp" podStartSLOduration=14.788402057 podStartE2EDuration="32.228266542s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.02815084 +0000 UTC m=+1033.885337549" lastFinishedPulling="2026-01-04 12:05:52.468015325 +0000 UTC m=+1051.325202034" observedRunningTime="2026-01-04 12:06:05.227534543 +0000 UTC 
m=+1064.084721252" watchObservedRunningTime="2026-01-04 12:06:05.228266542 +0000 UTC m=+1064.085453251" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.239822 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" event={"ID":"b4d1813a-0643-4fff-9bc6-6f065accb1bc","Type":"ContainerStarted","Data":"ee796aef2ef8dfdb227f23366cb59ec78cb87ca0e98367fe3a9228e6c73b3b16"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.240402 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.255181 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" event={"ID":"158d06c2-999b-4a0e-b214-b56a428deeb8","Type":"ContainerStarted","Data":"0c3013a95c64ac7e0f4a636c10062bd0fe13b449446b45cd27e027f91e76212b"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.257220 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf" podStartSLOduration=2.622616951 podStartE2EDuration="31.257206265s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.92836235 +0000 UTC m=+1034.785549059" lastFinishedPulling="2026-01-04 12:06:04.562951624 +0000 UTC m=+1063.420138373" observedRunningTime="2026-01-04 12:06:05.25326661 +0000 UTC m=+1064.110453319" watchObservedRunningTime="2026-01-04 12:06:05.257206265 +0000 UTC m=+1064.114392974" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.262858 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" event={"ID":"2d49544b-5665-46d3-8a14-fad6d8ecf7bb","Type":"ContainerStarted","Data":"c1a25d09c450229091061ec9fde8d367f8b08eeced3863bedc67991f16e99be4"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.263514 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.275704 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" event={"ID":"b6950105-3c91-45a1-ad35-9871a20ed456","Type":"ContainerStarted","Data":"61dad05665f7790cb9142e395477b141408d887dd4cecd2e13b9fbdff9928f13"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.276381 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.287944 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" event={"ID":"e0eb23c2-253c-422a-9ad9-736b6a2e7beb","Type":"ContainerStarted","Data":"fb8c476ca777cf147cacbec0466fe34c65b64b61673e7a93a4cc3bca18f6afe2"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.288611 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.316265 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" 
event={"ID":"fa826769-776f-42e2-ad58-f528ca756f03","Type":"ContainerStarted","Data":"b71d4f506a73f999cbf601e78702b1cf5cd824a2d345e90a60bed1027ce7d89c"} Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.317023 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.356989 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9" podStartSLOduration=2.622926239 podStartE2EDuration="31.356970189s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.806098035 +0000 UTC m=+1034.663284744" lastFinishedPulling="2026-01-04 12:06:04.540141985 +0000 UTC m=+1063.397328694" observedRunningTime="2026-01-04 12:06:05.353552048 +0000 UTC m=+1064.210738777" watchObservedRunningTime="2026-01-04 12:06:05.356970189 +0000 UTC m=+1064.214156898" Jan 04 12:06:05 crc kubenswrapper[4797]: I0104 12:06:05.494146 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46" podStartSLOduration=15.288998819 podStartE2EDuration="31.494122651s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.70069422 +0000 UTC m=+1034.557880939" lastFinishedPulling="2026-01-04 12:05:51.905818052 +0000 UTC m=+1050.763004771" observedRunningTime="2026-01-04 12:06:05.400911723 +0000 UTC m=+1064.258098432" watchObservedRunningTime="2026-01-04 12:06:05.494122651 +0000 UTC m=+1064.351309360" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.080750 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt" podStartSLOduration=16.494872797 podStartE2EDuration="33.080735057s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.319954242 +0000 UTC m=+1034.177140951" lastFinishedPulling="2026-01-04 12:05:51.905816502 +0000 UTC m=+1050.763003211" observedRunningTime="2026-01-04 12:06:05.653655111 +0000 UTC m=+1064.510841820" watchObservedRunningTime="2026-01-04 12:06:06.080735057 +0000 UTC m=+1064.937921766" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.085251 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh" podStartSLOduration=16.000117995 podStartE2EDuration="33.085229147s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.382888263 +0000 UTC m=+1034.240074982" lastFinishedPulling="2026-01-04 12:05:52.467999425 +0000 UTC m=+1051.325186134" observedRunningTime="2026-01-04 12:06:06.076464933 +0000 UTC m=+1064.933651642" watchObservedRunningTime="2026-01-04 12:06:06.085229147 +0000 UTC m=+1064.942415866" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.184375 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf" podStartSLOduration=3.432755185 podStartE2EDuration="32.184354874s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.810068001 +0000 UTC m=+1034.667254710" lastFinishedPulling="2026-01-04 12:06:04.5616677 +0000 UTC m=+1063.418854399" observedRunningTime="2026-01-04 
12:06:06.182211777 +0000 UTC m=+1065.039398486" watchObservedRunningTime="2026-01-04 12:06:06.184354874 +0000 UTC m=+1065.041541583" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.213389 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x" podStartSLOduration=15.715262778 podStartE2EDuration="33.213373729s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.434138562 +0000 UTC m=+1034.291325271" lastFinishedPulling="2026-01-04 12:05:52.932249513 +0000 UTC m=+1051.789436222" observedRunningTime="2026-01-04 12:06:06.21155649 +0000 UTC m=+1065.068743199" watchObservedRunningTime="2026-01-04 12:06:06.213373729 +0000 UTC m=+1065.070560438" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.277275 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh" podStartSLOduration=15.03811428 podStartE2EDuration="32.277254965s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.701285836 +0000 UTC m=+1034.558472545" lastFinishedPulling="2026-01-04 12:05:52.940426511 +0000 UTC m=+1051.797613230" observedRunningTime="2026-01-04 12:06:06.244051808 +0000 UTC m=+1065.101238517" watchObservedRunningTime="2026-01-04 12:06:06.277254965 +0000 UTC m=+1065.134441664" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.286807 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm" podStartSLOduration=15.994725011 podStartE2EDuration="33.28679178s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.176117011 +0000 UTC m=+1034.033303720" lastFinishedPulling="2026-01-04 12:05:52.46818378 +0000 UTC m=+1051.325370489" observedRunningTime="2026-01-04 12:06:06.285147876 +0000 UTC m=+1065.142334585" watchObservedRunningTime="2026-01-04 12:06:06.28679178 +0000 UTC m=+1065.143978489" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.340343 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" event={"ID":"c5ea2cde-563f-4d84-a3cf-8292472baaa1","Type":"ContainerStarted","Data":"a0df1146698aab6959c6b701ffb4b1dbef43b51e11dc701e96088bc682984b63"} Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.341074 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.346449 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" event={"ID":"8a0379ed-3206-48b0-8822-61cac55ba4cb","Type":"ContainerStarted","Data":"21e927c4285a2c84e6bc052b3ecd673b627adeec8dcb91c45f56ff700c5fce5b"} Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.346804 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.388751 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5" podStartSLOduration=4.098599741 podStartE2EDuration="33.388733332s" podCreationTimestamp="2026-01-04 12:05:33 
+0000 UTC" firstStartedPulling="2026-01-04 12:05:35.794069914 +0000 UTC m=+1034.651256633" lastFinishedPulling="2026-01-04 12:06:05.084203515 +0000 UTC m=+1063.941390224" observedRunningTime="2026-01-04 12:06:06.385902656 +0000 UTC m=+1065.243089365" watchObservedRunningTime="2026-01-04 12:06:06.388733332 +0000 UTC m=+1065.245920031" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.399001 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k" podStartSLOduration=16.364754112 podStartE2EDuration="33.398964485s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.434377698 +0000 UTC m=+1034.291564407" lastFinishedPulling="2026-01-04 12:05:52.468588071 +0000 UTC m=+1051.325774780" observedRunningTime="2026-01-04 12:06:06.343774331 +0000 UTC m=+1065.200961040" watchObservedRunningTime="2026-01-04 12:06:06.398964485 +0000 UTC m=+1065.256151194" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.510768 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f" podStartSLOduration=3.509434403 podStartE2EDuration="32.510751861s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.794075254 +0000 UTC m=+1034.651261963" lastFinishedPulling="2026-01-04 12:06:04.795392712 +0000 UTC m=+1063.652579421" observedRunningTime="2026-01-04 12:06:06.448749015 +0000 UTC m=+1065.305935724" watchObservedRunningTime="2026-01-04 12:06:06.510751861 +0000 UTC m=+1065.367938570" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.571330 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.576705 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cf7b4084-7aaf-42c6-9cdc-656863de1ed7-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-97vck\" (UID: \"cf7b4084-7aaf-42c6-9cdc-656863de1ed7\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" Jan 04 12:06:06 crc kubenswrapper[4797]: I0104 12:06:06.822572 4797 util.go:30] "No sandbox for pod can be found. 
Jan 04 12:06:07 crc kubenswrapper[4797]: I0104 12:06:07.362145 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" event={"ID":"81b3cb8a-4d8c-4484-a935-54870fd8631d","Type":"ContainerStarted","Data":"0cbd6a28481a50b282f11976c8a1c375baf71615fe3cff9129afc3bf36145066"}
Jan 04 12:06:07 crc kubenswrapper[4797]: I0104 12:06:07.383093 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8" podStartSLOduration=2.149934438 podStartE2EDuration="33.383077647s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.701288146 +0000 UTC m=+1034.558474855" lastFinishedPulling="2026-01-04 12:06:06.934431355 +0000 UTC m=+1065.791618064" observedRunningTime="2026-01-04 12:06:07.381891235 +0000 UTC m=+1066.239077944" watchObservedRunningTime="2026-01-04 12:06:07.383077647 +0000 UTC m=+1066.240264356"
Jan 04 12:06:07 crc kubenswrapper[4797]: I0104 12:06:07.396418 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"]
Jan 04 12:06:08 crc kubenswrapper[4797]: I0104 12:06:08.371926 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" event={"ID":"cf7b4084-7aaf-42c6-9cdc-656863de1ed7","Type":"ContainerStarted","Data":"7410d56cf97c5e6915ccc2b6ac69de03b21ca8f3f47989d4b760a75cdb05f093"}
Jan 04 12:06:08 crc kubenswrapper[4797]: I0104 12:06:08.372535 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" event={"ID":"cf7b4084-7aaf-42c6-9cdc-656863de1ed7","Type":"ContainerStarted","Data":"6a787506b16ddf347d24ea8cc0a34ec9f625269459de2c0d52c7f23ca2b748c6"}
Jan 04 12:06:08 crc kubenswrapper[4797]: I0104 12:06:08.372558 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:06:08 crc kubenswrapper[4797]: I0104 12:06:08.424697 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck" podStartSLOduration=34.424677483 podStartE2EDuration="34.424677483s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:06:08.422592897 +0000 UTC m=+1067.279779626" watchObservedRunningTime="2026-01-04 12:06:08.424677483 +0000 UTC m=+1067.281864212"
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.406239 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" event={"ID":"cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf","Type":"ContainerStarted","Data":"bc7e6e5c5a68dd54a6254957f0ec1ad79d1cbbe55538b2ad17af717031c68cdb"}
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.406879 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.409528 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" event={"ID":"158d06c2-999b-4a0e-b214-b56a428deeb8","Type":"ContainerStarted","Data":"b69d8e28d394c2c1a5ec3849c6fcab80d467d46d985f160ae818989cfbff5f0b"}
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.409684 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.430653 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x" podStartSLOduration=32.217947447 podStartE2EDuration="38.430637518s" podCreationTimestamp="2026-01-04 12:05:33 +0000 UTC" firstStartedPulling="2026-01-04 12:06:04.525624228 +0000 UTC m=+1063.382810937" lastFinishedPulling="2026-01-04 12:06:10.738314299 +0000 UTC m=+1069.595501008" observedRunningTime="2026-01-04 12:06:11.427578906 +0000 UTC m=+1070.284765645" watchObservedRunningTime="2026-01-04 12:06:11.430637518 +0000 UTC m=+1070.287824227"
Jan 04 12:06:11 crc kubenswrapper[4797]: I0104 12:06:11.471936 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88" podStartSLOduration=31.510736506 podStartE2EDuration="37.47190859s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:06:04.773103837 +0000 UTC m=+1063.630290546" lastFinishedPulling="2026-01-04 12:06:10.734275921 +0000 UTC m=+1069.591462630" observedRunningTime="2026-01-04 12:06:11.459412126 +0000 UTC m=+1070.316598875" watchObservedRunningTime="2026-01-04 12:06:11.47190859 +0000 UTC m=+1070.329095329"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.168445 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-rdrvx"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.185766 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-6c2kv"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.239797 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-mq2pt"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.250955 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-86pbp"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.295650 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-x8kvm"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.338173 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-ftvkh"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.375596 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-ncp7k"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.486437 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-fnm8x"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.586726 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-8hq46"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.593222 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-s7lmf"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.609027 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.612002 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-sw9g8"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.623757 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-7bmtj"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.673701 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-5npjh"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.705182 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-wlg4f"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.743351 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-bljb9"
Jan 04 12:06:14 crc kubenswrapper[4797]: I0104 12:06:14.766817 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-568985c78-pktt5"
Jan 04 12:06:15 crc kubenswrapper[4797]: I0104 12:06:15.102215 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-28htf"
Jan 04 12:06:16 crc kubenswrapper[4797]: I0104 12:06:16.834072 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-97vck"
Jan 04 12:06:17 crc kubenswrapper[4797]: E0104 12:06:17.479673 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podUID="5c97e032-2a27-4cd4-bcd7-70d423968689"
Jan 04 12:06:18 crc kubenswrapper[4797]: E0104 12:06:18.476788 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podUID="73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba"
Jan 04 12:06:18 crc kubenswrapper[4797]: E0104 12:06:18.477195 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podUID="0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb"
Jan 04 12:06:20 crc kubenswrapper[4797]: I0104 12:06:20.252240 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88"
Jan 04 12:06:20 crc kubenswrapper[4797]: I0104 12:06:20.253669 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-ql65x"
Jan 04 12:06:32 crc kubenswrapper[4797]: I0104 12:06:32.478758 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.597168 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" event={"ID":"73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba","Type":"ContainerStarted","Data":"22ee5f5933a0cc2234684dd6a95349612772082a1f97ad223d6d99eccce36f81"}
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.598744 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4"
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.600287 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" event={"ID":"5c97e032-2a27-4cd4-bcd7-70d423968689","Type":"ContainerStarted","Data":"b52b1e049c0371f37b999bb2d0696d33080eb41294cfe4446c94d36d20f9ee82"}
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.601063 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs"
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.639974 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4" podStartSLOduration=2.317528523 podStartE2EDuration="59.639933423s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.825417131 +0000 UTC m=+1034.682603840" lastFinishedPulling="2026-01-04 12:06:33.147822031 +0000 UTC m=+1092.005008740" observedRunningTime="2026-01-04 12:06:33.630648025 +0000 UTC m=+1092.487834774" watchObservedRunningTime="2026-01-04 12:06:33.639933423 +0000 UTC m=+1092.497120182"
Jan 04 12:06:33 crc kubenswrapper[4797]: I0104 12:06:33.662144 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs" podStartSLOduration=2.603476439 podStartE2EDuration="59.662116905s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.952443203 +0000 UTC m=+1034.809629902" lastFinishedPulling="2026-01-04 12:06:33.011083649 +0000 UTC m=+1091.868270368" observedRunningTime="2026-01-04 12:06:33.649848928 +0000 UTC m=+1092.507035667" watchObservedRunningTime="2026-01-04 12:06:33.662116905 +0000 UTC m=+1092.519303624"
Jan 04 12:06:37 crc kubenswrapper[4797]: I0104 12:06:37.627190 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" event={"ID":"0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb","Type":"ContainerStarted","Data":"8e9f9ff89da5cea87e42e05f26176d5c352463c3509a416bcda2c6115721c193"}
Jan 04 12:06:38 crc kubenswrapper[4797]: I0104 12:06:38.657552 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-2tb7m" podStartSLOduration=6.245275095 podStartE2EDuration="1m4.657527609s" podCreationTimestamp="2026-01-04 12:05:34 +0000 UTC" firstStartedPulling="2026-01-04 12:05:35.934096053 +0000 UTC m=+1034.791282762" lastFinishedPulling="2026-01-04 12:06:34.346348567 +0000 UTC m=+1093.203535276" observedRunningTime="2026-01-04 12:06:38.652647798 +0000 UTC m=+1097.509834547" watchObservedRunningTime="2026-01-04 12:06:38.657527609 +0000 UTC m=+1097.514714348"
Jan 04 12:06:44 crc kubenswrapper[4797]: I0104 12:06:44.675913 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-zk4v4"
Jan 04 12:06:44 crc kubenswrapper[4797]: I0104 12:06:44.898957 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-l6drs"
Jan 04 12:06:49 crc kubenswrapper[4797]: I0104 12:06:49.492914 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:06:49 crc kubenswrapper[4797]: I0104 12:06:49.494352 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.213514 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"]
Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.225423 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.232999 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.233593 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-mfj97" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.233677 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.233742 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.234291 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.246429 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"] Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.325503 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.325752 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvtwp\" (UniqueName: \"kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.325997 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.427466 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.427538 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvtwp\" (UniqueName: \"kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.427639 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.428453 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.429065 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.459719 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvtwp\" (UniqueName: \"kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp\") pod \"dnsmasq-dns-5f854695bc-lf857\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.551827 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:02 crc kubenswrapper[4797]: I0104 12:07:02.876710 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"] Jan 04 12:07:03 crc kubenswrapper[4797]: I0104 12:07:03.859894 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-lf857" event={"ID":"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9","Type":"ContainerStarted","Data":"6aad2677cd8f08e436cf824ae8eb381a6a710a90f58b860664cac39993275ba2"} Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.037369 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"] Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.038761 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.058181 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"] Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.073342 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.073409 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.073440 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkjcm\" (UniqueName: \"kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.174412 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.174662 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.174681 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkjcm\" (UniqueName: \"kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.175597 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.175647 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.202389 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkjcm\" (UniqueName: 
\"kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm\") pod \"dnsmasq-dns-744ffd65bc-x82jn\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") " pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.324389 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"] Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.358236 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"] Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.358494 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.359716 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.376305 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"] Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.478128 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.478180 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.478256 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2spf\" (UniqueName: \"kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.579088 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2spf\" (UniqueName: \"kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.579194 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.579230 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.580180 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.580202 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.616051 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2spf\" (UniqueName: \"kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf\") pod \"dnsmasq-dns-95f5f6995-wqqvc\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") " pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.680486 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" Jan 04 12:07:05 crc kubenswrapper[4797]: I0104 12:07:05.858498 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"] Jan 04 12:07:05 crc kubenswrapper[4797]: W0104 12:07:05.865344 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfc23227_ee2c_4fd7_9bac_dbdca5d9e133.slice/crio-5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92 WatchSource:0}: Error finding container 5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92: Status 404 returned error can't find the container with id 5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92 Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.142067 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"] Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.208558 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.216423 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.218610 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.218659 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.218914 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.218972 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.219152 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rqk6j" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.219331 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.220091 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.229210 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292661 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292712 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292731 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292768 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292796 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292828 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djp9v\" (UniqueName: 
\"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292842 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292876 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292892 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292912 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.292928 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394816 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394868 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394898 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394920 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " 
pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394937 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.394967 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395037 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395075 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djp9v\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395090 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395127 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395143 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395694 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.395821 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.396007 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.396189 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.396277 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.396424 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.400891 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.401487 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.411892 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.413810 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.416668 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djp9v\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.419660 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.484999 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 
04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.486223 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.489711 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.489966 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.490153 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.490305 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-ld85c" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.491215 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.491453 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.491684 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.499297 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.550449 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598676 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598726 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598743 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598773 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598790 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598812 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph2cq\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598856 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598874 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598901 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598923 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.598949 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700168 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700217 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700238 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700277 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700299 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700314 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700341 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700357 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700379 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph2cq\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700414 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700432 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.700824 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.701772 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.701973 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.702334 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.702608 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.703040 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.704124 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.705710 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.706278 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.711295 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.719091 4797 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-ph2cq\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.719780 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.823932 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:06 crc kubenswrapper[4797]: I0104 12:07:06.878949 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" event={"ID":"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133","Type":"ContainerStarted","Data":"5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92"} Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.635252 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.636582 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.643698 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9w2rl" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.647060 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.647220 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.650250 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.650964 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.675103 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.820434 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824112 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824298 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xptj2\" (UniqueName: \"kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2\") pod \"openstack-galera-0\" (UID: 
\"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824573 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824656 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824682 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.824710 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.828411 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938672 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938724 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938743 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938777 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938808 4797 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938844 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938869 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xptj2\" (UniqueName: \"kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.938897 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.940571 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.940717 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.941354 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.943958 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.944511 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.950809 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle\") pod 
\"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.970376 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:07 crc kubenswrapper[4797]: I0104 12:07:07.979629 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xptj2\" (UniqueName: \"kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:08 crc kubenswrapper[4797]: I0104 12:07:08.005103 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " pod="openstack/openstack-galera-0" Jan 04 12:07:08 crc kubenswrapper[4797]: I0104 12:07:08.271563 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:07:08 crc kubenswrapper[4797]: W0104 12:07:08.695087 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod081af9ec_8533_4fa8_911c_13ad7a288a1f.slice/crio-a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1 WatchSource:0}: Error finding container a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1: Status 404 returned error can't find the container with id a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1 Jan 04 12:07:08 crc kubenswrapper[4797]: I0104 12:07:08.940844 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" event={"ID":"081af9ec-8533-4fa8-911c-13ad7a288a1f","Type":"ContainerStarted","Data":"a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1"} Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.188278 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.190211 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.192324 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rdc6f" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.192942 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.193084 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.193364 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.206621 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257401 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257452 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257474 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257507 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257529 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phcfm\" (UniqueName: \"kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257557 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257595 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.257623 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.359022 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phcfm\" (UniqueName: \"kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.359079 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360477 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360520 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360571 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360601 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360639 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360663 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs\") pod \"openstack-cell1-galera-0\" 
(UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.360707 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.361320 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.361459 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.361827 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.362949 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.368366 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.368730 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.387610 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phcfm\" (UniqueName: \"kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.411555 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: 
I0104 12:07:09.518413 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.583427 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.584513 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.590415 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.590672 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-wfnmk" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.590888 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.598292 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.665383 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.665441 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.665485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.665517 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxs9n\" (UniqueName: \"kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.665571 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.766375 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.766427 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.766457 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.766493 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.766526 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxs9n\" (UniqueName: \"kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.767265 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.767976 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.771758 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.773563 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.788547 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxs9n\" (UniqueName: \"kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n\") pod \"memcached-0\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " pod="openstack/memcached-0" Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.794527 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:07:09 crc kubenswrapper[4797]: I0104 12:07:09.913209 4797 util.go:30] "No sandbox for pod can be found. 
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.321144 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.321941 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.324331 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jkpzj"
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.372403 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.394856 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc2zn\" (UniqueName: \"kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn\") pod \"kube-state-metrics-0\" (UID: \"681bdc49-0a76-4a29-b7c0-1f4e051636fb\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.495862 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc2zn\" (UniqueName: \"kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn\") pod \"kube-state-metrics-0\" (UID: \"681bdc49-0a76-4a29-b7c0-1f4e051636fb\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.521174 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc2zn\" (UniqueName: \"kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn\") pod \"kube-state-metrics-0\" (UID: \"681bdc49-0a76-4a29-b7c0-1f4e051636fb\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:07:11 crc kubenswrapper[4797]: I0104 12:07:11.646637 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:07:14 crc kubenswrapper[4797]: I0104 12:07:14.530966 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerStarted","Data":"04e7c09d94dcf8dd3e32b58f307acc57b1b92bb8f74bb451e4e224e9aacb9123"}
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.842970 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8lx8k"]
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.845432 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.847396 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.847680 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-lpsg4"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.852154 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.856813 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"]
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.859558 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.859683 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.859795 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.859870 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.859977 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.860096 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pwh4\" (UniqueName: \"kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.860222 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.883184 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.895340 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8lx8k"]
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.901233 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"]
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961493 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961542 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961559 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961576 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961593 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9tx2\" (UniqueName: \"kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961623 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961652 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961676 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961694 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961722 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961744 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961767 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.961792 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pwh4\" (UniqueName: \"kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.962448 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.962533 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.962844 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.964805 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.966969 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.967485 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:15 crc kubenswrapper[4797]: I0104 12:07:15.978962 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pwh4\" (UniqueName: \"kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4\") pod \"ovn-controller-8lx8k\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.063892 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064152 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064563 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064704 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064844 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064890 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.064949 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.065188 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9tx2\" (UniqueName: \"kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.065532 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.067677 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.069191 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.086810 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9tx2\" (UniqueName: \"kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2\") pod \"ovn-controller-ovs-2ft9n\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.191262 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.206386 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.703111 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.704182 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.708289 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.708448 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.708600 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.708716 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.710148 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-2cmjl"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.716691 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877402 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877444 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877523 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b57fv\" (UniqueName: \"kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877549 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877595 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877626 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877659 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.877675 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.978856 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.978949 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979044 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979112 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979165 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979288 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b57fv\" (UniqueName: \"kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979355 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979428 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979447 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.979514 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.980598 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.981096 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.989636 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:16 crc kubenswrapper[4797]: I0104 12:07:16.990154 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:17 crc kubenswrapper[4797]: I0104 12:07:17.007878 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:17 crc kubenswrapper[4797]: I0104 12:07:17.008523 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b57fv\" (UniqueName: \"kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:17 crc kubenswrapper[4797]: I0104 12:07:17.010667 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:17 crc kubenswrapper[4797]: I0104 12:07:17.033885 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.529418 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.531262 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.539743 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.541279 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-td9kw"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.541562 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.552571 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.577571 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.632817 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.632911 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633010 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfbdx\" (UniqueName: \"kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633137 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633229 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633349 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633398 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.633422 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735099 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735162 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfbdx\" (UniqueName: \"kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735211 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735248 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735768 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735816 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735836 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735854 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.736796 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.735408 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.737038 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.738643 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.746722 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.748654 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.769289 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.797996 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.798049 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfbdx\" (UniqueName: \"kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx\") pod \"ovsdbserver-nb-0\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[4797]: I0104 12:07:18.896568 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:19 crc kubenswrapper[4797]: I0104 12:07:19.493638 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:07:19 crc kubenswrapper[4797]: I0104 12:07:19.493727 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:07:21 crc kubenswrapper[4797]: I0104 12:07:21.878351 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.728410 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.728928 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b2spf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} 
start failed in pod dnsmasq-dns-95f5f6995-wqqvc_openstack(081af9ec-8533-4fa8-911c-13ad7a288a1f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.730213 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" podUID="081af9ec-8533-4fa8-911c-13ad7a288a1f" Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.737824 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.738067 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rvtwp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-lf857_openstack(a1a99bb5-ad2a-455f-aba6-d4d072bd01a9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:22 crc kubenswrapper[4797]: E0104 12:07:22.739323 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: 
code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-lf857" podUID="a1a99bb5-ad2a-455f-aba6-d4d072bd01a9" Jan 04 12:07:23 crc kubenswrapper[4797]: I0104 12:07:23.185697 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:23 crc kubenswrapper[4797]: I0104 12:07:23.289829 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8lx8k"] Jan 04 12:07:23 crc kubenswrapper[4797]: I0104 12:07:23.296145 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:07:23 crc kubenswrapper[4797]: I0104 12:07:23.305272 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:23 crc kubenswrapper[4797]: I0104 12:07:23.634643 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerStarted","Data":"6030299dc435f9ffcfd7689657cf8b99ec0c84d025c97d869e16e8efea13d8c9"} Jan 04 12:07:23 crc kubenswrapper[4797]: E0104 12:07:23.636447 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" podUID="081af9ec-8533-4fa8-911c-13ad7a288a1f" Jan 04 12:07:24 crc kubenswrapper[4797]: W0104 12:07:24.145034 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1955fef_0f64_4332_b967_c50875302a97.slice/crio-9c5c0c5c57dc1b5326ec40f64bc36adcb8d3e1b7a4c02d8e8e54eab46f6cc4c1 WatchSource:0}: Error finding container 9c5c0c5c57dc1b5326ec40f64bc36adcb8d3e1b7a4c02d8e8e54eab46f6cc4c1: Status 404 returned error can't find the container with id 9c5c0c5c57dc1b5326ec40f64bc36adcb8d3e1b7a4c02d8e8e54eab46f6cc4c1 Jan 04 12:07:24 crc kubenswrapper[4797]: W0104 12:07:24.148012 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ffb9045_87ff_4c59_ac14_5de55b6cd42e.slice/crio-3bdb7ce84a0316f6b77bcb4876a493653157233c83e3edc1a992d6a330dbccc2 WatchSource:0}: Error finding container 3bdb7ce84a0316f6b77bcb4876a493653157233c83e3edc1a992d6a330dbccc2: Status 404 returned error can't find the container with id 3bdb7ce84a0316f6b77bcb4876a493653157233c83e3edc1a992d6a330dbccc2 Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.314559 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.385189 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvtwp\" (UniqueName: \"kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp\") pod \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.385248 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc\") pod \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.385274 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config\") pod \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\" (UID: \"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9\") " Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.385891 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config" (OuterVolumeSpecName: "config") pod "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9" (UID: "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.390148 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp" (OuterVolumeSpecName: "kube-api-access-rvtwp") pod "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9" (UID: "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9"). InnerVolumeSpecName "kube-api-access-rvtwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.395219 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9" (UID: "a1a99bb5-ad2a-455f-aba6-d4d072bd01a9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.491095 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvtwp\" (UniqueName: \"kubernetes.io/projected/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-kube-api-access-rvtwp\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.491558 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.491607 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.648840 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"102e7d3d-5368-4d87-ba33-874aeed5eaa9","Type":"ContainerStarted","Data":"581283018b89c218e4e5ed02a9fb4fa5128f43172ca416163f25d6c14cd70c26"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.652523 4797 generic.go:334] "Generic (PLEG): container finished" podID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" containerID="4d40f334d1ddce402a853dae1624ea09bc54b4ce1eb605752f43dee829d250b6" exitCode=0 Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.652647 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" event={"ID":"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133","Type":"ContainerDied","Data":"4d40f334d1ddce402a853dae1624ea09bc54b4ce1eb605752f43dee829d250b6"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.657455 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k" event={"ID":"9ffb9045-87ff-4c59-ac14-5de55b6cd42e","Type":"ContainerStarted","Data":"3bdb7ce84a0316f6b77bcb4876a493653157233c83e3edc1a992d6a330dbccc2"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.659266 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"681bdc49-0a76-4a29-b7c0-1f4e051636fb","Type":"ContainerStarted","Data":"854343d9ae5b1d91fb65167e001cc57e27f45cecb00e67a23ec93771a6401a24"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.665021 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-lf857" event={"ID":"a1a99bb5-ad2a-455f-aba6-d4d072bd01a9","Type":"ContainerDied","Data":"6aad2677cd8f08e436cf824ae8eb381a6a710a90f58b860664cac39993275ba2"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.665351 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-lf857" Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.674363 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerStarted","Data":"9c5c0c5c57dc1b5326ec40f64bc36adcb8d3e1b7a4c02d8e8e54eab46f6cc4c1"} Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.803012 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"] Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.828047 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-lf857"] Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.835486 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.843603 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:07:24 crc kubenswrapper[4797]: I0104 12:07:24.882164 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"] Jan 04 12:07:24 crc kubenswrapper[4797]: W0104 12:07:24.922609 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe75b707_995c_4dd4_958a_a7c2b8e4fb4e.slice/crio-e97a6a189846df8453b6f473dcf31d0ba9db2f9777cf92d8280d68b02b850d5d WatchSource:0}: Error finding container e97a6a189846df8453b6f473dcf31d0ba9db2f9777cf92d8280d68b02b850d5d: Status 404 returned error can't find the container with id e97a6a189846df8453b6f473dcf31d0ba9db2f9777cf92d8280d68b02b850d5d Jan 04 12:07:24 crc kubenswrapper[4797]: W0104 12:07:24.934538 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72e6c6e9_97f2_4420_a6b9_92418e78dd60.slice/crio-9be205198f51cb0a4cd3b06beccbbed5cb91576c2cfce549967de9b7f80d5632 WatchSource:0}: Error finding container 9be205198f51cb0a4cd3b06beccbbed5cb91576c2cfce549967de9b7f80d5632: Status 404 returned error can't find the container with id 9be205198f51cb0a4cd3b06beccbbed5cb91576c2cfce549967de9b7f80d5632 Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.484326 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1a99bb5-ad2a-455f-aba6-d4d072bd01a9" path="/var/lib/kubelet/pods/a1a99bb5-ad2a-455f-aba6-d4d072bd01a9/volumes" Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.709774 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerStarted","Data":"9be205198f51cb0a4cd3b06beccbbed5cb91576c2cfce549967de9b7f80d5632"} Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.711443 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerStarted","Data":"245e10bca25f08bc626b5aac80b4d7c9c27d64f07c83fb38359bbb8adbb0f904"} Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.713797 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerStarted","Data":"d1315ae8a058420bc7be271bbc4e77777c76d634265824c96d8d296c973a410a"} Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.714940 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerStarted","Data":"e97a6a189846df8453b6f473dcf31d0ba9db2f9777cf92d8280d68b02b850d5d"} Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.716514 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerStarted","Data":"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"} Jan 04 12:07:25 crc kubenswrapper[4797]: I0104 12:07:25.935929 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.018870 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-txsr9"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.020949 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.030128 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.092168 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-txsr9"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.164512 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169548 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169621 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169654 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169674 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5glk\" (UniqueName: \"kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169731 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 
12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.169750 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.190549 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.191659 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.194383 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.208682 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.273559 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.273965 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274023 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274051 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5glk\" (UniqueName: \"kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274136 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274164 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274407 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" 
(UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274478 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.274703 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.280142 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.280530 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.303316 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5glk\" (UniqueName: \"kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk\") pod \"ovn-controller-metrics-txsr9\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: W0104 12:07:29.322776 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d84926f_3521_47ed_9581_a7beb6762e06.slice/crio-bf9532ac64b46e063d3f67cc8b7a7c6b2ae05b027b7998fb4afed29a2756f447 WatchSource:0}: Error finding container bf9532ac64b46e063d3f67cc8b7a7c6b2ae05b027b7998fb4afed29a2756f447: Status 404 returned error can't find the container with id bf9532ac64b46e063d3f67cc8b7a7c6b2ae05b027b7998fb4afed29a2756f447 Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.351198 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.375476 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.375541 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.375606 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lgdp\" (UniqueName: \"kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.375666 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.466216 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.477350 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lgdp\" (UniqueName: \"kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.477732 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.478660 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.479248 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.479354 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.479954 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.480245 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.506029 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.506153 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lgdp\" (UniqueName: \"kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp\") pod \"dnsmasq-dns-5b79764b65-lqwr6\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.507584 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.518759 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.521899 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"] Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.548066 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.681836 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.681893 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.681921 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.682198 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.682309 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nf6c\" (UniqueName: \"kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.743802 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerStarted","Data":"bf9532ac64b46e063d3f67cc8b7a7c6b2ae05b027b7998fb4afed29a2756f447"} Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.783528 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nf6c\" (UniqueName: \"kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.783637 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.783682 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " 
pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.783719 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.783769 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.784887 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.785506 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.785806 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.786481 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.813283 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nf6c\" (UniqueName: \"kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c\") pod \"dnsmasq-dns-586b989cdc-w9qc6\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:29 crc kubenswrapper[4797]: I0104 12:07:29.860510 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:30 crc kubenswrapper[4797]: E0104 12:07:30.537892 4797 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Jan 04 12:07:30 crc kubenswrapper[4797]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 04 12:07:30 crc kubenswrapper[4797]: > podSandboxID="5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92" Jan 04 12:07:30 crc kubenswrapper[4797]: E0104 12:07:30.538105 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:07:30 crc kubenswrapper[4797]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rkjcm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-744ffd65bc-x82jn_openstack(cfc23227-ee2c-4fd7-9bac-dbdca5d9e133): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 04 12:07:30 crc kubenswrapper[4797]: > logger="UnhandledError" Jan 04 12:07:30 crc kubenswrapper[4797]: E0104 12:07:30.539265 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" podUID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.658478 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.659256 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xptj2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(c1955fef-0f64-4332-b967-c50875302a97): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.660513 4797 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="c1955fef-0f64-4332-b967-c50875302a97" Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.802605 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc" event={"ID":"081af9ec-8533-4fa8-911c-13ad7a288a1f","Type":"ContainerDied","Data":"a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1"} Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.802865 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a35806f5527d5a75bacbbff8303ca20fc86652ce2f571c05df947096ac7342a1" Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.804923 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn" event={"ID":"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133","Type":"ContainerDied","Data":"5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92"} Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.805184 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c5da9cebd3cf93b381c5b86ce85c1cd814620ecc067f630f281c257ab638f92" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.806428 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-galera-0" podUID="c1955fef-0f64-4332-b967-c50875302a97" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.819943 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de" Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.820193 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5bdh689h564h4h685h568h5c9hcch656h5b9h575h68dhd8h9ch5d6h7fh695h665h65fhc7h685h5c9h5fdh8bhf9h547h9fh6bh5c4h5f7h5cbhcfq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5pwh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-8lx8k_openstack(9ffb9045-87ff-4c59-ac14-5de55b6cd42e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:36 crc kubenswrapper[4797]: E0104 12:07:36.821426 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e"
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.868304 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc"
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.872796 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn"
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998206 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config\") pod \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998282 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc\") pod \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998384 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2spf\" (UniqueName: \"kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf\") pod \"081af9ec-8533-4fa8-911c-13ad7a288a1f\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998464 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc\") pod \"081af9ec-8533-4fa8-911c-13ad7a288a1f\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998501 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config\") pod \"081af9ec-8533-4fa8-911c-13ad7a288a1f\" (UID: \"081af9ec-8533-4fa8-911c-13ad7a288a1f\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.998537 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkjcm\" (UniqueName: \"kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm\") pod \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\" (UID: \"cfc23227-ee2c-4fd7-9bac-dbdca5d9e133\") "
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.999793 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config" (OuterVolumeSpecName: "config") pod "081af9ec-8533-4fa8-911c-13ad7a288a1f" (UID: "081af9ec-8533-4fa8-911c-13ad7a288a1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:07:36 crc kubenswrapper[4797]: I0104 12:07:36.999844 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "081af9ec-8533-4fa8-911c-13ad7a288a1f" (UID: "081af9ec-8533-4fa8-911c-13ad7a288a1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.004690 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm" (OuterVolumeSpecName: "kube-api-access-rkjcm") pod "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" (UID: "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133"). InnerVolumeSpecName "kube-api-access-rkjcm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.006085 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf" (OuterVolumeSpecName: "kube-api-access-b2spf") pod "081af9ec-8533-4fa8-911c-13ad7a288a1f" (UID: "081af9ec-8533-4fa8-911c-13ad7a288a1f"). InnerVolumeSpecName "kube-api-access-b2spf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.048592 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config" (OuterVolumeSpecName: "config") pod "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" (UID: "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.061631 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" (UID: "cfc23227-ee2c-4fd7-9bac-dbdca5d9e133"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100673 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100725 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100745 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2spf\" (UniqueName: \"kubernetes.io/projected/081af9ec-8533-4fa8-911c-13ad7a288a1f-kube-api-access-b2spf\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100818 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100841 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/081af9ec-8533-4fa8-911c-13ad7a288a1f-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.100858 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkjcm\" (UniqueName: \"kubernetes.io/projected/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133-kube-api-access-rkjcm\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.811865 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-x82jn"
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.811923 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-wqqvc"
Jan 04 12:07:37 crc kubenswrapper[4797]: E0104 12:07:37.814265 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de\\\"\"" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e"
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.881084 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"]
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.887808 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-wqqvc"]
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.935284 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"]
Jan 04 12:07:37 crc kubenswrapper[4797]: I0104 12:07:37.942949 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-x82jn"]
Jan 04 12:07:39 crc kubenswrapper[4797]: I0104 12:07:39.483953 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="081af9ec-8533-4fa8-911c-13ad7a288a1f" path="/var/lib/kubelet/pods/081af9ec-8533-4fa8-911c-13ad7a288a1f/volumes"
Jan 04 12:07:39 crc kubenswrapper[4797]: I0104 12:07:39.485101 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" path="/var/lib/kubelet/pods/cfc23227-ee2c-4fd7-9bac-dbdca5d9e133/volumes"
Jan 04 12:07:39 crc kubenswrapper[4797]: I0104 12:07:39.867380 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"]
Jan 04 12:07:40 crc kubenswrapper[4797]: I0104 12:07:40.756867 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-txsr9"]
Jan 04 12:07:40 crc kubenswrapper[4797]: I0104 12:07:40.809813 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"]
Jan 04 12:07:40 crc kubenswrapper[4797]: I0104 12:07:40.838759 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" event={"ID":"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00","Type":"ContainerStarted","Data":"b8ed824316e3b48d53b1a8dca3a1545d12c9e9d5c5aea13b73d84344c52f8d46"}
Jan 04 12:07:42 crc kubenswrapper[4797]: W0104 12:07:42.203312 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6cb8a97_f0f7_4182_901b_84c315967b1f.slice/crio-54042b7dec1e3a17615e5ec789b8a10493942572ff8ecf2ad5f4bd1aa20e052e WatchSource:0}: Error finding container 54042b7dec1e3a17615e5ec789b8a10493942572ff8ecf2ad5f4bd1aa20e052e: Status 404 returned error can't find the container with id 54042b7dec1e3a17615e5ec789b8a10493942572ff8ecf2ad5f4bd1aa20e052e
Jan 04 12:07:42 crc kubenswrapper[4797]: E0104 12:07:42.212847 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb"
image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 04 12:07:42 crc kubenswrapper[4797]: E0104 12:07:42.212927 4797 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 04 12:07:42 crc kubenswrapper[4797]: E0104 12:07:42.213193 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jc2zn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(681bdc49-0a76-4a29-b7c0-1f4e051636fb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 12:07:42 crc kubenswrapper[4797]: E0104 12:07:42.214404 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.854557 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" 
event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerStarted","Data":"13b3c1f89d1638121f9ff2d3c0347d60458aa80a5677df698bfe3f584120c7fe"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.857488 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerStarted","Data":"1b47902615c0883c42301d21eb8e7a3ecf78720d4c51636bcc79479f16c32b75"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.859045 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"102e7d3d-5368-4d87-ba33-874aeed5eaa9","Type":"ContainerStarted","Data":"2a9eccf9a453b475692f8ed1731c1a41b538844deba6d050f2d590b9042ae5f2"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.859162 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.861459 4797 generic.go:334] "Generic (PLEG): container finished" podID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerID="1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80" exitCode=0 Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.861525 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" event={"ID":"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00","Type":"ContainerDied","Data":"1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.862933 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerStarted","Data":"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.864009 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-txsr9" event={"ID":"a305d4e5-d5e2-4bac-85ec-568c06b92b98","Type":"ContainerStarted","Data":"1641f6584669b10c7e6092eb04077767c3bc7d6d0dd4e03089481e7c49b1e589"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.865073 4797 generic.go:334] "Generic (PLEG): container finished" podID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerID="f7ac26be04b5f947b616410b66262f7a13f5e79ab0c39ee80c1a81b2352dc343" exitCode=0 Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.865155 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" event={"ID":"a6cb8a97-f0f7-4182-901b-84c315967b1f","Type":"ContainerDied","Data":"f7ac26be04b5f947b616410b66262f7a13f5e79ab0c39ee80c1a81b2352dc343"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.865191 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" event={"ID":"a6cb8a97-f0f7-4182-901b-84c315967b1f","Type":"ContainerStarted","Data":"54042b7dec1e3a17615e5ec789b8a10493942572ff8ecf2ad5f4bd1aa20e052e"} Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.866439 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerStarted","Data":"d3af461d9e11838f802cfe0a7de35c8ace652dcb15039a7ab0301617ec90e7f2"} Jan 04 12:07:42 crc kubenswrapper[4797]: E0104 12:07:42.867860 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb\\\"\"" pod="openstack/kube-state-metrics-0" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" Jan 04 12:07:42 crc kubenswrapper[4797]: I0104 12:07:42.910338 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=19.632778636 podStartE2EDuration="33.91031926s" podCreationTimestamp="2026-01-04 12:07:09 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.154575573 +0000 UTC m=+1143.011762282" lastFinishedPulling="2026-01-04 12:07:38.432116187 +0000 UTC m=+1157.289302906" observedRunningTime="2026-01-04 12:07:42.894256152 +0000 UTC m=+1161.751442851" watchObservedRunningTime="2026-01-04 12:07:42.91031926 +0000 UTC m=+1161.767505969" Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.885207 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" event={"ID":"a6cb8a97-f0f7-4182-901b-84c315967b1f","Type":"ContainerStarted","Data":"3f0e443570dca42265ef82d5cdf6ddf0e076e4a70c2f285160a11793f15e3c03"} Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.885515 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.889618 4797 generic.go:334] "Generic (PLEG): container finished" podID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerID="13b3c1f89d1638121f9ff2d3c0347d60458aa80a5677df698bfe3f584120c7fe" exitCode=0 Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.889678 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerDied","Data":"13b3c1f89d1638121f9ff2d3c0347d60458aa80a5677df698bfe3f584120c7fe"} Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.892768 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" event={"ID":"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00","Type":"ContainerStarted","Data":"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6"} Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.893421 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.906300 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" podStartSLOduration=15.90627833 podStartE2EDuration="15.90627833s" podCreationTimestamp="2026-01-04 12:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:07:44.904810411 +0000 UTC m=+1163.761997130" watchObservedRunningTime="2026-01-04 12:07:44.90627833 +0000 UTC m=+1163.763465039" Jan 04 12:07:44 crc kubenswrapper[4797]: I0104 12:07:44.955493 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" podStartSLOduration=15.955473913 podStartE2EDuration="15.955473913s" podCreationTimestamp="2026-01-04 12:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:07:44.950262334 +0000 UTC m=+1163.807449073" watchObservedRunningTime="2026-01-04 12:07:44.955473913 +0000 UTC m=+1163.812660622" Jan 04 12:07:49 crc 
kubenswrapper[4797]: I0104 12:07:49.493785 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.494678 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.494756 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.495850 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.495982 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766" gracePeriod=600 Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.550156 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.862201 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.915138 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.920705 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"] Jan 04 12:07:49 crc kubenswrapper[4797]: I0104 12:07:49.942934 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="dnsmasq-dns" containerID="cri-o://3f0e443570dca42265ef82d5cdf6ddf0e076e4a70c2f285160a11793f15e3c03" gracePeriod=10 Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.952270 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerStarted","Data":"07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093"} Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.955867 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766" exitCode=0 Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.955933 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766"} Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.956003 4797 scope.go:117] "RemoveContainer" containerID="3795bf3e5874fa6b9680fec4f4448847de34ba9683c7b1a3e9b1a43713ad076c" Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.958157 4797 generic.go:334] "Generic (PLEG): container finished" podID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerID="3f0e443570dca42265ef82d5cdf6ddf0e076e4a70c2f285160a11793f15e3c03" exitCode=0 Jan 04 12:07:50 crc kubenswrapper[4797]: I0104 12:07:50.958188 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" event={"ID":"a6cb8a97-f0f7-4182-901b-84c315967b1f","Type":"ContainerDied","Data":"3f0e443570dca42265ef82d5cdf6ddf0e076e4a70c2f285160a11793f15e3c03"} Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.909025 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"] Jan 04 12:07:51 crc kubenswrapper[4797]: E0104 12:07:51.909541 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" containerName="init" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.909558 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" containerName="init" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.909702 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc23227-ee2c-4fd7-9bac-dbdca5d9e133" containerName="init" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.910530 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.911810 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.930819 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"] Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.980471 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" event={"ID":"a6cb8a97-f0f7-4182-901b-84c315967b1f","Type":"ContainerDied","Data":"54042b7dec1e3a17615e5ec789b8a10493942572ff8ecf2ad5f4bd1aa20e052e"} Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.980727 4797 scope.go:117] "RemoveContainer" containerID="3f0e443570dca42265ef82d5cdf6ddf0e076e4a70c2f285160a11793f15e3c03" Jan 04 12:07:51 crc kubenswrapper[4797]: I0104 12:07:51.980652 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-lqwr6" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.012096 4797 scope.go:117] "RemoveContainer" containerID="f7ac26be04b5f947b616410b66262f7a13f5e79ab0c39ee80c1a81b2352dc343" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101432 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc\") pod \"a6cb8a97-f0f7-4182-901b-84c315967b1f\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101529 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config\") pod \"a6cb8a97-f0f7-4182-901b-84c315967b1f\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101659 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb\") pod \"a6cb8a97-f0f7-4182-901b-84c315967b1f\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101680 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lgdp\" (UniqueName: \"kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp\") pod \"a6cb8a97-f0f7-4182-901b-84c315967b1f\" (UID: \"a6cb8a97-f0f7-4182-901b-84c315967b1f\") " Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101855 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101877 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25pmf\" (UniqueName: \"kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101915 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.101977 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.102033 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config\") pod 
\"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.107571 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp" (OuterVolumeSpecName: "kube-api-access-5lgdp") pod "a6cb8a97-f0f7-4182-901b-84c315967b1f" (UID: "a6cb8a97-f0f7-4182-901b-84c315967b1f"). InnerVolumeSpecName "kube-api-access-5lgdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.179118 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config" (OuterVolumeSpecName: "config") pod "a6cb8a97-f0f7-4182-901b-84c315967b1f" (UID: "a6cb8a97-f0f7-4182-901b-84c315967b1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.202859 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203162 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203180 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25pmf\" (UniqueName: \"kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203217 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203280 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203359 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.203372 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lgdp\" (UniqueName: \"kubernetes.io/projected/a6cb8a97-f0f7-4182-901b-84c315967b1f-kube-api-access-5lgdp\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.204085 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.204834 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.205151 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.205198 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.231132 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25pmf\" (UniqueName: \"kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf\") pod \"dnsmasq-dns-67fdf7998c-k9xdt\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.251816 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6cb8a97-f0f7-4182-901b-84c315967b1f" (UID: "a6cb8a97-f0f7-4182-901b-84c315967b1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.262340 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a6cb8a97-f0f7-4182-901b-84c315967b1f" (UID: "a6cb8a97-f0f7-4182-901b-84c315967b1f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.282107 4797 util.go:30] "No sandbox for pod can be found. 
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.309945 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.309974 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6cb8a97-f0f7-4182-901b-84c315967b1f-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.311608 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"]
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.317017 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-lqwr6"]
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.705222 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"]
Jan 04 12:07:52 crc kubenswrapper[4797]: W0104 12:07:52.707597 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1650ba5a_ccad_4447_ad19_02185249e682.slice/crio-2546e481a10f8178a92fd4f30b6aab587bc2d364e117e99ff9c89100bfe85cbe WatchSource:0}: Error finding container 2546e481a10f8178a92fd4f30b6aab587bc2d364e117e99ff9c89100bfe85cbe: Status 404 returned error can't find the container with id 2546e481a10f8178a92fd4f30b6aab587bc2d364e117e99ff9c89100bfe85cbe
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.991410 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerStarted","Data":"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444"}
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.994850 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerStarted","Data":"876e9084cf30c1be04d840130d0d0fc76c012572a641f8f762449c931b2c2a7b"}
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.996230 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k" event={"ID":"9ffb9045-87ff-4c59-ac14-5de55b6cd42e","Type":"ContainerStarted","Data":"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6"}
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.996720 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8lx8k"
Jan 04 12:07:52 crc kubenswrapper[4797]: I0104 12:07:52.997868 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-txsr9" event={"ID":"a305d4e5-d5e2-4bac-85ec-568c06b92b98","Type":"ContainerStarted","Data":"db595fec7696fae4c7b3b7368a9d2399b874117452a45b57928c938fea8c4220"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.000646 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerStarted","Data":"d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.000927 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.002569 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.003887 4797 generic.go:334] "Generic (PLEG): container finished" podID="1650ba5a-ccad-4447-ad19-02185249e682" containerID="de1d4658c4876b69c123dad8d47fa379445823451e9792186447d7758b569734" exitCode=0
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.003929 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" event={"ID":"1650ba5a-ccad-4447-ad19-02185249e682","Type":"ContainerDied","Data":"de1d4658c4876b69c123dad8d47fa379445823451e9792186447d7758b569734"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.003944 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" event={"ID":"1650ba5a-ccad-4447-ad19-02185249e682","Type":"ContainerStarted","Data":"2546e481a10f8178a92fd4f30b6aab587bc2d364e117e99ff9c89100bfe85cbe"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.006391 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerStarted","Data":"ab503fd970b32577760bbe5ec35f3c0df3a184059f20a5bf6e5b7c34bf2d9638"}
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.035836 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.064459 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.129709195 podStartE2EDuration="36.064432978s" podCreationTimestamp="2026-01-04 12:07:17 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.930131437 +0000 UTC m=+1143.787318146" lastFinishedPulling="2026-01-04 12:07:51.86485522 +0000 UTC m=+1170.722041929" observedRunningTime="2026-01-04 12:07:53.053073035 +0000 UTC m=+1171.910259784" watchObservedRunningTime="2026-01-04 12:07:53.064432978 +0000 UTC m=+1171.921619707"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.130561 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.130895 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="init"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.130907 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="init"
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.130923 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="dnsmasq-dns"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.130928 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="dnsmasq-dns"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.131130 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" containerName="dnsmasq-dns"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.135871 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.136030 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.140426 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.140517 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-5tp46"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.140651 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.140428 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.177069 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.192449 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-2ft9n" podStartSLOduration=22.817360363 podStartE2EDuration="38.192419163s" podCreationTimestamp="2026-01-04 12:07:15 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.924704062 +0000 UTC m=+1143.781890771" lastFinishedPulling="2026-01-04 12:07:40.299762852 +0000 UTC m=+1159.156949571" observedRunningTime="2026-01-04 12:07:53.140787236 +0000 UTC m=+1171.997973945" watchObservedRunningTime="2026-01-04 12:07:53.192419163 +0000 UTC m=+1172.049605872"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.198413 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=16.84951956 podStartE2EDuration="38.198395373s" podCreationTimestamp="2026-01-04 12:07:15 +0000 UTC" firstStartedPulling="2026-01-04 12:07:30.54083344 +0000 UTC m=+1149.398020149" lastFinishedPulling="2026-01-04 12:07:51.889709253 +0000 UTC m=+1170.746895962" observedRunningTime="2026-01-04 12:07:53.172129232 +0000 UTC m=+1172.029315981" watchObservedRunningTime="2026-01-04 12:07:53.198395373 +0000 UTC m=+1172.055582082"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.227268 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8lx8k" podStartSLOduration=10.254878243 podStartE2EDuration="38.227250943s" podCreationTimestamp="2026-01-04 12:07:15 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.15032328 +0000 UTC m=+1143.007509979" lastFinishedPulling="2026-01-04 12:07:52.12269597 +0000 UTC m=+1170.979882679" observedRunningTime="2026-01-04 12:07:53.202858292 +0000 UTC m=+1172.060045021" watchObservedRunningTime="2026-01-04 12:07:53.227250943 +0000 UTC m=+1172.084437652"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.229934 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-txsr9" podStartSLOduration=15.578961462 podStartE2EDuration="25.229915174s" podCreationTimestamp="2026-01-04 12:07:28 +0000 UTC" firstStartedPulling="2026-01-04 12:07:42.213398994 +0000 UTC m=+1161.070585743" lastFinishedPulling="2026-01-04 12:07:51.864352746 +0000 UTC m=+1170.721539455" observedRunningTime="2026-01-04 12:07:53.217498613 +0000 UTC m=+1172.074685312" watchObservedRunningTime="2026-01-04 12:07:53.229915174 +0000 UTC m=+1172.087101883"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.230835 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.231041 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.231088 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.231140 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp4w5\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.231176 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.332145 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.332579 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.332607 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.332670 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.332767 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:07:53.832734748 +0000 UTC m=+1172.689921657 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.332624 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp4w5\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.333081 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.333324 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.333396 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.333739 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.333771 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.353266 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.353641 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp4w5\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.487669 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6cb8a97-f0f7-4182-901b-84c315967b1f" path="/var/lib/kubelet/pods/a6cb8a97-f0f7-4182-901b-84c315967b1f/volumes"
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.842834 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.843070 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.843204 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: E0104 12:07:53.843262 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:07:54.84324515 +0000 UTC m=+1173.700431859 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : configmap "swift-ring-files" not found
Jan 04 12:07:53 crc kubenswrapper[4797]: I0104 12:07:53.897139 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.014231 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" event={"ID":"1650ba5a-ccad-4447-ad19-02185249e682","Type":"ContainerStarted","Data":"44067c99ea4de70545c8a94b5364d81bb441239ca0aaa302aa476fd8d4cfce0a"}
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.014401 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.017156 4797 generic.go:334] "Generic (PLEG): container finished" podID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerID="e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e" exitCode=0
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.017499 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerDied","Data":"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e"}
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.018757 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2ft9n"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.018787 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.050668 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" podStartSLOduration=3.050646324 podStartE2EDuration="3.050646324s" podCreationTimestamp="2026-01-04 12:07:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:07:54.04486495 +0000 UTC m=+1172.902051689" watchObservedRunningTime="2026-01-04 12:07:54.050646324 +0000 UTC m=+1172.907833033"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.079526 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.859759 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0"
Jan 04 12:07:54 crc kubenswrapper[4797]: E0104 12:07:54.860050 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 04 12:07:54 crc kubenswrapper[4797]: E0104 12:07:54.860103 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 04 12:07:54 crc kubenswrapper[4797]: E0104 12:07:54.860203 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:07:56.860171146 +0000 UTC m=+1175.717357895 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : configmap "swift-ring-files" not found
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.896836 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:54 crc kubenswrapper[4797]: I0104 12:07:54.959952 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.029456 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerStarted","Data":"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba"}
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.071124 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=29.802236241 podStartE2EDuration="47.071108674s" podCreationTimestamp="2026-01-04 12:07:08 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.93660838 +0000 UTC m=+1143.793795089" lastFinishedPulling="2026-01-04 12:07:42.205480773 +0000 UTC m=+1161.062667522" observedRunningTime="2026-01-04 12:07:55.056159475 +0000 UTC m=+1173.913346254" watchObservedRunningTime="2026-01-04 12:07:55.071108674 +0000 UTC m=+1173.928295383"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.100771 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.306125 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.307960 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.310118 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.311270 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-96vpb"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.312625 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.313072 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.334376 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467703 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467748 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467772 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467799 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467848 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtbhg\" (UniqueName: \"kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.467976 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.468059 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0"
Jan 04 12:07:55 crc kubenswrapper[4797]:
I0104 12:07:55.569191 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569264 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtbhg\" (UniqueName: \"kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569361 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569431 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569469 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569494 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.569523 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.570073 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.570435 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.570503 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.575509 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.575762 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.576448 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.593018 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtbhg\" (UniqueName: \"kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg\") pod \"ovn-northd-0\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " pod="openstack/ovn-northd-0" Jan 04 12:07:55 crc kubenswrapper[4797]: I0104 12:07:55.629711 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.039531 4797 generic.go:334] "Generic (PLEG): container finished" podID="c1955fef-0f64-4332-b967-c50875302a97" containerID="96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444" exitCode=0 Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.039620 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerDied","Data":"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444"} Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.134845 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:07:56 crc kubenswrapper[4797]: E0104 12:07:56.803069 4797 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.22:41640->38.102.83.22:38015: write tcp 38.102.83.22:41640->38.102.83.22:38015: write: broken pipe Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.895276 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0" Jan 04 12:07:56 crc kubenswrapper[4797]: E0104 12:07:56.895626 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:07:56 crc kubenswrapper[4797]: E0104 12:07:56.895684 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:07:56 crc kubenswrapper[4797]: E0104 12:07:56.895777 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. 
No retries permitted until 2026-01-04 12:08:00.895749681 +0000 UTC m=+1179.752936410 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : configmap "swift-ring-files" not found Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.964616 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-ppk5k"] Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.967067 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.969357 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.969557 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.969686 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 04 12:07:56 crc kubenswrapper[4797]: I0104 12:07:56.976351 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-ppk5k"] Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.049603 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerStarted","Data":"c50bef501c809c20be6a4846aacee0622f1dcd7827f65a9f04cfa86cbfb87b09"} Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.053076 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerStarted","Data":"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717"} Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.073392 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371985.781399 podStartE2EDuration="51.073377131s" podCreationTimestamp="2026-01-04 12:07:06 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.147807523 +0000 UTC m=+1143.004994232" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:07:57.071293386 +0000 UTC m=+1175.928480105" watchObservedRunningTime="2026-01-04 12:07:57.073377131 +0000 UTC m=+1175.930563840" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111563 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111637 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111668 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" 
(UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111688 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw6vb\" (UniqueName: \"kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111716 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111747 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.111782 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.212861 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.212983 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213126 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213245 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213317 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213347 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213379 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw6vb\" (UniqueName: \"kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.213971 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.214044 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.214200 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.219794 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.219910 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.220179 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf\") pod \"swift-ring-rebalance-ppk5k\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.231193 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw6vb\" (UniqueName: \"kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb\") pod \"swift-ring-rebalance-ppk5k\" (UID: 
\"4a2256c1-a1b7-4c63-92be-8283893ede12\") " pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.325588 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:07:57 crc kubenswrapper[4797]: I0104 12:07:57.763366 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-ppk5k"] Jan 04 12:07:57 crc kubenswrapper[4797]: W0104 12:07:57.771602 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a2256c1_a1b7_4c63_92be_8283893ede12.slice/crio-b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c WatchSource:0}: Error finding container b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c: Status 404 returned error can't find the container with id b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.066392 4797 generic.go:334] "Generic (PLEG): container finished" podID="1414255a-a94a-4508-aa55-4ad9837afbea" containerID="0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5" exitCode=0 Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.066494 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerDied","Data":"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"} Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.068969 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-ppk5k" event={"ID":"4a2256c1-a1b7-4c63-92be-8283893ede12","Type":"ContainerStarted","Data":"b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c"} Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.072042 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerStarted","Data":"b2a3aa87e9987ca244304550b8c7cb9d2bc8fd403c42e8b33895b09bf3e9a6f0"} Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.072094 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerStarted","Data":"58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47"} Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.073052 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.075073 4797 generic.go:334] "Generic (PLEG): container finished" podID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerID="245e10bca25f08bc626b5aac80b4d7c9c27d64f07c83fb38359bbb8adbb0f904" exitCode=0 Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.075138 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerDied","Data":"245e10bca25f08bc626b5aac80b4d7c9c27d64f07c83fb38359bbb8adbb0f904"} Jan 04 12:07:58 crc kubenswrapper[4797]: E0104 12:07:58.093422 4797 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.22:41654->38.102.83.22:38015: write tcp 38.102.83.22:41654->38.102.83.22:38015: write: broken pipe Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.154029 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovn-northd-0" podStartSLOduration=1.973212589 podStartE2EDuration="3.153972275s" podCreationTimestamp="2026-01-04 12:07:55 +0000 UTC" firstStartedPulling="2026-01-04 12:07:56.15587513 +0000 UTC m=+1175.013061839" lastFinishedPulling="2026-01-04 12:07:57.336634816 +0000 UTC m=+1176.193821525" observedRunningTime="2026-01-04 12:07:58.134206708 +0000 UTC m=+1176.991393427" watchObservedRunningTime="2026-01-04 12:07:58.153972275 +0000 UTC m=+1177.011159004" Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.272798 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 04 12:07:58 crc kubenswrapper[4797]: I0104 12:07:58.272833 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.087631 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerStarted","Data":"6c2bd4e26c7793a7e6748b52de489f7127e21a2457e862fa1b66701b1c8d40a1"} Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.088464 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.090588 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerStarted","Data":"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"} Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.090806 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.092513 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"681bdc49-0a76-4a29-b7c0-1f4e051636fb","Type":"ContainerStarted","Data":"5cf863e6c4e567a92860c363b3a8fa6be4741f27258593aecd9da7c965fb66a2"} Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.093261 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.124245 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.470399743 podStartE2EDuration="54.124226135s" podCreationTimestamp="2026-01-04 12:07:05 +0000 UTC" firstStartedPulling="2026-01-04 12:07:13.586876198 +0000 UTC m=+1132.444062907" lastFinishedPulling="2026-01-04 12:07:24.24070257 +0000 UTC m=+1143.097889299" observedRunningTime="2026-01-04 12:07:59.113206961 +0000 UTC m=+1177.970393700" watchObservedRunningTime="2026-01-04 12:07:59.124226135 +0000 UTC m=+1177.981412844" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.134683 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.407129103 podStartE2EDuration="48.134664834s" podCreationTimestamp="2026-01-04 12:07:11 +0000 UTC" firstStartedPulling="2026-01-04 12:07:24.128283972 +0000 UTC m=+1142.985470681" lastFinishedPulling="2026-01-04 12:07:58.855819693 +0000 UTC m=+1177.713006412" observedRunningTime="2026-01-04 12:07:59.134026437 +0000 UTC m=+1177.991213146" watchObservedRunningTime="2026-01-04 12:07:59.134664834 +0000 UTC m=+1177.991851543" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.162430 4797 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=52.654707343 podStartE2EDuration="54.162409554s" podCreationTimestamp="2026-01-04 12:07:05 +0000 UTC" firstStartedPulling="2026-01-04 12:07:22.733718209 +0000 UTC m=+1141.590904928" lastFinishedPulling="2026-01-04 12:07:24.24142043 +0000 UTC m=+1143.098607139" observedRunningTime="2026-01-04 12:07:59.161792158 +0000 UTC m=+1178.018978867" watchObservedRunningTime="2026-01-04 12:07:59.162409554 +0000 UTC m=+1178.019596263" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.518865 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.518917 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:59 crc kubenswrapper[4797]: I0104 12:07:59.615790 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:00 crc kubenswrapper[4797]: I0104 12:08:00.178169 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:00 crc kubenswrapper[4797]: I0104 12:08:00.978666 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0" Jan 04 12:08:00 crc kubenswrapper[4797]: E0104 12:08:00.978865 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:00 crc kubenswrapper[4797]: E0104 12:08:00.978897 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:00 crc kubenswrapper[4797]: E0104 12:08:00.978955 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:08:08.978938815 +0000 UTC m=+1187.836125524 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : configmap "swift-ring-files" not found Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.116173 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-ppk5k" event={"ID":"4a2256c1-a1b7-4c63-92be-8283893ede12","Type":"ContainerStarted","Data":"f4fe31bebac4759860a28ee2a6a704b01148a21eef8e356261b41ddb81be35c1"} Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.144453 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-ppk5k" podStartSLOduration=2.529682101 podStartE2EDuration="6.144436705s" podCreationTimestamp="2026-01-04 12:07:56 +0000 UTC" firstStartedPulling="2026-01-04 12:07:57.77336378 +0000 UTC m=+1176.630550489" lastFinishedPulling="2026-01-04 12:08:01.388118384 +0000 UTC m=+1180.245305093" observedRunningTime="2026-01-04 12:08:02.144091086 +0000 UTC m=+1181.001277805" watchObservedRunningTime="2026-01-04 12:08:02.144436705 +0000 UTC m=+1181.001623404" Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.285043 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.361587 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"] Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.361903 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="dnsmasq-dns" containerID="cri-o://b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6" gracePeriod=10 Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.436392 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.581064 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 04 12:08:02 crc kubenswrapper[4797]: I0104 12:08:02.896398 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.011010 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc\") pod \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.011060 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb\") pod \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.011223 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nf6c\" (UniqueName: \"kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c\") pod \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.011243 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb\") pod \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.011280 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config\") pod \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\" (UID: \"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00\") " Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.017183 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c" (OuterVolumeSpecName: "kube-api-access-9nf6c") pod "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" (UID: "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00"). InnerVolumeSpecName "kube-api-access-9nf6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.052016 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config" (OuterVolumeSpecName: "config") pod "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" (UID: "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.054167 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" (UID: "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.061450 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" (UID: "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.065983 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" (UID: "7a19e9c6-64aa-43f2-8b5f-06dbc275ba00"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.112687 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nf6c\" (UniqueName: \"kubernetes.io/projected/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-kube-api-access-9nf6c\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.112713 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.112722 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.112729 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.112737 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.124880 4797 generic.go:334] "Generic (PLEG): container finished" podID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerID="b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6" exitCode=0 Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.124951 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.124979 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" event={"ID":"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00","Type":"ContainerDied","Data":"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6"} Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.125044 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-w9qc6" event={"ID":"7a19e9c6-64aa-43f2-8b5f-06dbc275ba00","Type":"ContainerDied","Data":"b8ed824316e3b48d53b1a8dca3a1545d12c9e9d5c5aea13b73d84344c52f8d46"} Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.125069 4797 scope.go:117] "RemoveContainer" containerID="b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.144058 4797 scope.go:117] "RemoveContainer" containerID="1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.182549 4797 scope.go:117] "RemoveContainer" containerID="b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.183857 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"] Jan 04 12:08:03 crc kubenswrapper[4797]: E0104 12:08:03.184313 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6\": container with ID starting with b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6 not found: ID does not exist" containerID="b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.184357 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6"} err="failed to get container status \"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6\": rpc error: code = NotFound desc = could not find container \"b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6\": container with ID starting with b70e5d45fba80e836bcd7214d030d4f3dcb09d3feb5f94249703a888e3dda2f6 not found: ID does not exist" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.184385 4797 scope.go:117] "RemoveContainer" containerID="1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80" Jan 04 12:08:03 crc kubenswrapper[4797]: E0104 12:08:03.184823 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80\": container with ID starting with 1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80 not found: ID does not exist" containerID="1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.184862 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80"} err="failed to get container status \"1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80\": rpc error: code = NotFound desc = could not find container 
\"1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80\": container with ID starting with 1d8c54c66660889ef8cb9e8bec5ca9153e7eb911e11a935aba9086c839a0aa80 not found: ID does not exist" Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.197458 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-w9qc6"] Jan 04 12:08:03 crc kubenswrapper[4797]: I0104 12:08:03.492376 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" path="/var/lib/kubelet/pods/7a19e9c6-64aa-43f2-8b5f-06dbc275ba00/volumes" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.712499 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-qh9b4"] Jan 04 12:08:06 crc kubenswrapper[4797]: E0104 12:08:06.713155 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="init" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.713171 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="init" Jan 04 12:08:06 crc kubenswrapper[4797]: E0104 12:08:06.713184 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="dnsmasq-dns" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.713192 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="dnsmasq-dns" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.713403 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a19e9c6-64aa-43f2-8b5f-06dbc275ba00" containerName="dnsmasq-dns" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.714131 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.721393 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qh9b4"] Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.736616 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.875078 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.875144 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94z8w\" (UniqueName: \"kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.977378 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.977496 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94z8w\" (UniqueName: \"kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.978205 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:06 crc kubenswrapper[4797]: I0104 12:08:06.995510 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94z8w\" (UniqueName: \"kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w\") pod \"root-account-create-update-qh9b4\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:07 crc kubenswrapper[4797]: I0104 12:08:07.052556 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:07 crc kubenswrapper[4797]: I0104 12:08:07.526429 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qh9b4"] Jan 04 12:08:07 crc kubenswrapper[4797]: W0104 12:08:07.533346 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod834e2733_f822_45e1_aa2c_bbefe9d0cca7.slice/crio-52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff WatchSource:0}: Error finding container 52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff: Status 404 returned error can't find the container with id 52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff Jan 04 12:08:08 crc kubenswrapper[4797]: I0104 12:08:08.166252 4797 generic.go:334] "Generic (PLEG): container finished" podID="834e2733-f822-45e1-aa2c-bbefe9d0cca7" containerID="c4b06a39de7f8fcb4a0aabdb535223544b26a0c5df8ee9344130d0f22a2c7aa2" exitCode=0 Jan 04 12:08:08 crc kubenswrapper[4797]: I0104 12:08:08.166425 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qh9b4" event={"ID":"834e2733-f822-45e1-aa2c-bbefe9d0cca7","Type":"ContainerDied","Data":"c4b06a39de7f8fcb4a0aabdb535223544b26a0c5df8ee9344130d0f22a2c7aa2"} Jan 04 12:08:08 crc kubenswrapper[4797]: I0104 12:08:08.168403 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qh9b4" event={"ID":"834e2733-f822-45e1-aa2c-bbefe9d0cca7","Type":"ContainerStarted","Data":"52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff"} Jan 04 12:08:08 crc kubenswrapper[4797]: I0104 12:08:08.170907 4797 generic.go:334] "Generic (PLEG): container finished" podID="4a2256c1-a1b7-4c63-92be-8283893ede12" containerID="f4fe31bebac4759860a28ee2a6a704b01148a21eef8e356261b41ddb81be35c1" exitCode=0 Jan 04 12:08:08 crc kubenswrapper[4797]: I0104 12:08:08.171029 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-ppk5k" event={"ID":"4a2256c1-a1b7-4c63-92be-8283893ede12","Type":"ContainerDied","Data":"f4fe31bebac4759860a28ee2a6a704b01148a21eef8e356261b41ddb81be35c1"} Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.014397 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.028500 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"swift-storage-0\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " pod="openstack/swift-storage-0" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.104176 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.525047 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-dmxvk"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.526611 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.533593 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dmxvk"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.617117 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-fe06-account-create-update-6qbzh"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.618109 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.619840 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.629521 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fe06-account-create-update-6qbzh"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.631137 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7sxc\" (UniqueName: \"kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.631265 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.651274 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.654388 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.709697 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:08:09 crc kubenswrapper[4797]: W0104 12:08:09.711954 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78ea6800_bdfe_4593_8aad_7aaba5be8897.slice/crio-c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15 WatchSource:0}: Error finding container c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15: Status 404 returned error can't find the container with id c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15 Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732189 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732524 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732582 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94z8w\" (UniqueName: \"kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w\") pod \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732618 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw6vb\" (UniqueName: \"kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732650 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts\") pod \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\" (UID: \"834e2733-f822-45e1-aa2c-bbefe9d0cca7\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732685 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732735 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732808 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift\") pod 
\"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.732843 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts\") pod \"4a2256c1-a1b7-4c63-92be-8283893ede12\" (UID: \"4a2256c1-a1b7-4c63-92be-8283893ede12\") " Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.733076 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgr92\" (UniqueName: \"kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.733131 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7sxc\" (UniqueName: \"kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.733212 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.733287 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.733781 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.735265 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "834e2733-f822-45e1-aa2c-bbefe9d0cca7" (UID: "834e2733-f822-45e1-aa2c-bbefe9d0cca7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.735767 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.736528 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.739389 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb" (OuterVolumeSpecName: "kube-api-access-fw6vb") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "kube-api-access-fw6vb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.741162 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.742021 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w" (OuterVolumeSpecName: "kube-api-access-94z8w") pod "834e2733-f822-45e1-aa2c-bbefe9d0cca7" (UID: "834e2733-f822-45e1-aa2c-bbefe9d0cca7"). InnerVolumeSpecName "kube-api-access-94z8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.749045 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7sxc\" (UniqueName: \"kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc\") pod \"keystone-db-create-dmxvk\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.754624 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts" (OuterVolumeSpecName: "scripts") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.754682 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.762441 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a2256c1-a1b7-4c63-92be-8283893ede12" (UID: "4a2256c1-a1b7-4c63-92be-8283893ede12"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.808739 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lbhjz"] Jan 04 12:08:09 crc kubenswrapper[4797]: E0104 12:08:09.809087 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2256c1-a1b7-4c63-92be-8283893ede12" containerName="swift-ring-rebalance" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.809101 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2256c1-a1b7-4c63-92be-8283893ede12" containerName="swift-ring-rebalance" Jan 04 12:08:09 crc kubenswrapper[4797]: E0104 12:08:09.809116 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="834e2733-f822-45e1-aa2c-bbefe9d0cca7" containerName="mariadb-account-create-update" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.809122 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="834e2733-f822-45e1-aa2c-bbefe9d0cca7" containerName="mariadb-account-create-update" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.809276 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="834e2733-f822-45e1-aa2c-bbefe9d0cca7" containerName="mariadb-account-create-update" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.809286 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a2256c1-a1b7-4c63-92be-8283893ede12" containerName="swift-ring-rebalance" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.809777 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.819894 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lbhjz"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835197 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgr92\" (UniqueName: \"kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835310 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835422 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw6vb\" (UniqueName: \"kubernetes.io/projected/4a2256c1-a1b7-4c63-92be-8283893ede12-kube-api-access-fw6vb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835439 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/834e2733-f822-45e1-aa2c-bbefe9d0cca7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835452 4797 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835464 4797 
reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835475 4797 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4a2256c1-a1b7-4c63-92be-8283893ede12-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835487 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256c1-a1b7-4c63-92be-8283893ede12-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835498 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835510 4797 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4a2256c1-a1b7-4c63-92be-8283893ede12-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.835522 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94z8w\" (UniqueName: \"kubernetes.io/projected/834e2733-f822-45e1-aa2c-bbefe9d0cca7-kube-api-access-94z8w\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.836143 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.850543 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgr92\" (UniqueName: \"kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92\") pod \"keystone-fe06-account-create-update-6qbzh\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.918317 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-0a7e-account-create-update-kv964"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.919845 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.923101 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.938083 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0a7e-account-create-update-kv964"] Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.945979 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts\") pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.946245 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgcfw\" (UniqueName: \"kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw\") pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.951862 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:09 crc kubenswrapper[4797]: I0104 12:08:09.965289 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.048483 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.048560 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgcfw\" (UniqueName: \"kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw\") pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.048590 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wpcv\" (UniqueName: \"kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.048631 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts\") pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.049368 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts\") 
pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.068673 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-698r7"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.069632 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.083447 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgcfw\" (UniqueName: \"kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw\") pod \"placement-db-create-lbhjz\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.108958 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-698r7"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.128204 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.150768 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.150857 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wpcv\" (UniqueName: \"kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.151517 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.158775 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4447-account-create-update-k6c6m"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.159974 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.162928 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.173165 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4447-account-create-update-k6c6m"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.181820 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wpcv\" (UniqueName: \"kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv\") pod \"placement-0a7e-account-create-update-kv964\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.188590 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qh9b4" event={"ID":"834e2733-f822-45e1-aa2c-bbefe9d0cca7","Type":"ContainerDied","Data":"52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff"} Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.188614 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qh9b4" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.188625 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52ebff40734f6e943bf863aa370682bc562cde23e0aa96c9f33ea5aee79d60ff" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.190129 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ppk5k" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.190646 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-ppk5k" event={"ID":"4a2256c1-a1b7-4c63-92be-8283893ede12","Type":"ContainerDied","Data":"b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c"} Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.190704 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b174ed2aa32cd088c9f9bfe8fec5618084d77fd2304149e3c24ef7288724a50c" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.192112 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15"} Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.254170 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.254495 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx8j4\" (UniqueName: \"kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.254534 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.254581 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72zzr\" (UniqueName: \"kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.254624 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.357008 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.357412 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72zzr\" (UniqueName: \"kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.357487 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.357565 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx8j4\" (UniqueName: \"kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.358512 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.359071 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.376915 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72zzr\" (UniqueName: \"kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr\") pod \"glance-db-create-698r7\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.376941 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx8j4\" (UniqueName: \"kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4\") pod \"glance-4447-account-create-update-k6c6m\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.437343 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-698r7" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.466744 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dmxvk"] Jan 04 12:08:10 crc kubenswrapper[4797]: W0104 12:08:10.470228 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f1faf93_a80e_424a_b37b_0dc4506c5716.slice/crio-573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b WatchSource:0}: Error finding container 573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b: Status 404 returned error can't find the container with id 573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.534704 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fe06-account-create-update-6qbzh"] Jan 04 12:08:10 crc kubenswrapper[4797]: W0104 12:08:10.539035 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb242917a_c1a0_4747_a162_d4d13d917682.slice/crio-ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e WatchSource:0}: Error finding container ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e: Status 404 returned error can't find the container with id ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.550198 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.688123 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lbhjz"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.706017 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-698r7"] Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.730930 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 04 12:08:10 crc kubenswrapper[4797]: W0104 12:08:10.740427 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21b80b72_ea6e_4983_81b5_f0482f65b8a1.slice/crio-273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd WatchSource:0}: Error finding container 273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd: Status 404 returned error can't find the container with id 273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd Jan 04 12:08:10 crc kubenswrapper[4797]: I0104 12:08:10.758261 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0a7e-account-create-update-kv964"] Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.044473 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4447-account-create-update-k6c6m"] Jan 04 12:08:11 crc kubenswrapper[4797]: W0104 12:08:11.054090 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52a647a9_b6cd_434c_a388_25def81293c4.slice/crio-df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050 WatchSource:0}: Error finding container df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050: Status 404 returned error can't find the container with id df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050 Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.207411 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-kv964" event={"ID":"7e253c02-bdd1-46d2-b93e-e995108a867b","Type":"ContainerStarted","Data":"c893edf3b97be1c8c7f521e8f94c429b5040ef1eb7fdfb089bb26d03f0b7689d"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.207772 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-kv964" event={"ID":"7e253c02-bdd1-46d2-b93e-e995108a867b","Type":"ContainerStarted","Data":"95f16c410cfd4495a1fcbbb86b0986aa58d05cd237f4b6fcd838edb492804f22"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.210388 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-698r7" event={"ID":"21b80b72-ea6e-4983-81b5-f0482f65b8a1","Type":"ContainerStarted","Data":"5f3a604178468117d07b94420a105b1d9dc97d1da1e7ba0ee92010cef552b3f1"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.210434 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-698r7" event={"ID":"21b80b72-ea6e-4983-81b5-f0482f65b8a1","Type":"ContainerStarted","Data":"273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.212711 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbhjz" 
event={"ID":"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6","Type":"ContainerStarted","Data":"186a9d18142f1a946ca7ae7698d3d92717cd8c9dba8656f7d9054ca1fcf3d306"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.212755 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbhjz" event={"ID":"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6","Type":"ContainerStarted","Data":"24107a4203ac76ea8745bd189dea7857955af1b21d4b17d84733795047cda637"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.214886 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4447-account-create-update-k6c6m" event={"ID":"52a647a9-b6cd-434c-a388-25def81293c4","Type":"ContainerStarted","Data":"df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.216248 4797 generic.go:334] "Generic (PLEG): container finished" podID="6f1faf93-a80e-424a-b37b-0dc4506c5716" containerID="9d2278141a7532207d8e73b992de20ebd9c40d4061f0a7d49e36b0f7ad19b5c7" exitCode=0 Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.216303 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dmxvk" event={"ID":"6f1faf93-a80e-424a-b37b-0dc4506c5716","Type":"ContainerDied","Data":"9d2278141a7532207d8e73b992de20ebd9c40d4061f0a7d49e36b0f7ad19b5c7"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.216319 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dmxvk" event={"ID":"6f1faf93-a80e-424a-b37b-0dc4506c5716","Type":"ContainerStarted","Data":"573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.227877 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-0a7e-account-create-update-kv964" podStartSLOduration=2.227857172 podStartE2EDuration="2.227857172s" podCreationTimestamp="2026-01-04 12:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:11.223482786 +0000 UTC m=+1190.080669505" watchObservedRunningTime="2026-01-04 12:08:11.227857172 +0000 UTC m=+1190.085043871" Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.228552 4797 generic.go:334] "Generic (PLEG): container finished" podID="b242917a-c1a0-4747-a162-d4d13d917682" containerID="8e5d7c350df979ff40b9c3dcb14ce546947dc7c0a8d44e33037dd6c84c3f15fe" exitCode=0 Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.228600 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fe06-account-create-update-6qbzh" event={"ID":"b242917a-c1a0-4747-a162-d4d13d917682","Type":"ContainerDied","Data":"8e5d7c350df979ff40b9c3dcb14ce546947dc7c0a8d44e33037dd6c84c3f15fe"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.228629 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fe06-account-create-update-6qbzh" event={"ID":"b242917a-c1a0-4747-a162-d4d13d917682","Type":"ContainerStarted","Data":"ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e"} Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.244731 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-698r7" podStartSLOduration=1.244715992 podStartE2EDuration="1.244715992s" podCreationTimestamp="2026-01-04 12:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-01-04 12:08:11.242568385 +0000 UTC m=+1190.099755094" watchObservedRunningTime="2026-01-04 12:08:11.244715992 +0000 UTC m=+1190.101902701" Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.278623 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-lbhjz" podStartSLOduration=2.2786053969999998 podStartE2EDuration="2.278605397s" podCreationTimestamp="2026-01-04 12:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:11.26709667 +0000 UTC m=+1190.124283379" watchObservedRunningTime="2026-01-04 12:08:11.278605397 +0000 UTC m=+1190.135792106" Jan 04 12:08:11 crc kubenswrapper[4797]: I0104 12:08:11.652958 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.237432 4797 generic.go:334] "Generic (PLEG): container finished" podID="21b80b72-ea6e-4983-81b5-f0482f65b8a1" containerID="5f3a604178468117d07b94420a105b1d9dc97d1da1e7ba0ee92010cef552b3f1" exitCode=0 Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.237539 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-698r7" event={"ID":"21b80b72-ea6e-4983-81b5-f0482f65b8a1","Type":"ContainerDied","Data":"5f3a604178468117d07b94420a105b1d9dc97d1da1e7ba0ee92010cef552b3f1"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.239804 4797 generic.go:334] "Generic (PLEG): container finished" podID="91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" containerID="186a9d18142f1a946ca7ae7698d3d92717cd8c9dba8656f7d9054ca1fcf3d306" exitCode=0 Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.239863 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbhjz" event={"ID":"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6","Type":"ContainerDied","Data":"186a9d18142f1a946ca7ae7698d3d92717cd8c9dba8656f7d9054ca1fcf3d306"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.244172 4797 generic.go:334] "Generic (PLEG): container finished" podID="52a647a9-b6cd-434c-a388-25def81293c4" containerID="d29ca7741994258ece164501fcffe7239b50963d2ad0c1de905c698fc6ee8679" exitCode=0 Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.244271 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4447-account-create-update-k6c6m" event={"ID":"52a647a9-b6cd-434c-a388-25def81293c4","Type":"ContainerDied","Data":"d29ca7741994258ece164501fcffe7239b50963d2ad0c1de905c698fc6ee8679"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.246224 4797 generic.go:334] "Generic (PLEG): container finished" podID="7e253c02-bdd1-46d2-b93e-e995108a867b" containerID="c893edf3b97be1c8c7f521e8f94c429b5040ef1eb7fdfb089bb26d03f0b7689d" exitCode=0 Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.246325 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-kv964" event={"ID":"7e253c02-bdd1-46d2-b93e-e995108a867b","Type":"ContainerDied","Data":"c893edf3b97be1c8c7f521e8f94c429b5040ef1eb7fdfb089bb26d03f0b7689d"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.249392 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"99d34e3c79b062431c9d84a0e920a2cea64a5e8ddf3dd8c6b4b199964f36fd85"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 
12:08:12.249442 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"95970be1d420d961aa5faba4feae52bf2847295f0bff54ad0ab5ecfc128f1139"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.249458 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"f92a49d431c52702876723a17e42bd64b37bc0d9cd0421a5b6941ec41e2dc6b7"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.249470 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"3878ed80aaf358279c48ab4d9c9c529e7175864a9d22b44343cee0f365760367"} Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.656580 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.661973 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.798779 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts\") pod \"6f1faf93-a80e-424a-b37b-0dc4506c5716\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.798950 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts\") pod \"b242917a-c1a0-4747-a162-d4d13d917682\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.799070 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgr92\" (UniqueName: \"kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92\") pod \"b242917a-c1a0-4747-a162-d4d13d917682\" (UID: \"b242917a-c1a0-4747-a162-d4d13d917682\") " Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.799142 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7sxc\" (UniqueName: \"kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc\") pod \"6f1faf93-a80e-424a-b37b-0dc4506c5716\" (UID: \"6f1faf93-a80e-424a-b37b-0dc4506c5716\") " Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.799522 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6f1faf93-a80e-424a-b37b-0dc4506c5716" (UID: "6f1faf93-a80e-424a-b37b-0dc4506c5716"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.799754 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f1faf93-a80e-424a-b37b-0dc4506c5716-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.799874 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b242917a-c1a0-4747-a162-d4d13d917682" (UID: "b242917a-c1a0-4747-a162-d4d13d917682"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.805301 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc" (OuterVolumeSpecName: "kube-api-access-q7sxc") pod "6f1faf93-a80e-424a-b37b-0dc4506c5716" (UID: "6f1faf93-a80e-424a-b37b-0dc4506c5716"). InnerVolumeSpecName "kube-api-access-q7sxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.806057 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92" (OuterVolumeSpecName: "kube-api-access-kgr92") pod "b242917a-c1a0-4747-a162-d4d13d917682" (UID: "b242917a-c1a0-4747-a162-d4d13d917682"). InnerVolumeSpecName "kube-api-access-kgr92". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.903946 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b242917a-c1a0-4747-a162-d4d13d917682-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.904037 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgr92\" (UniqueName: \"kubernetes.io/projected/b242917a-c1a0-4747-a162-d4d13d917682-kube-api-access-kgr92\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:12 crc kubenswrapper[4797]: I0104 12:08:12.904061 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7sxc\" (UniqueName: \"kubernetes.io/projected/6f1faf93-a80e-424a-b37b-0dc4506c5716-kube-api-access-q7sxc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.199672 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qh9b4"] Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.207365 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-qh9b4"] Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.261448 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dmxvk" event={"ID":"6f1faf93-a80e-424a-b37b-0dc4506c5716","Type":"ContainerDied","Data":"573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b"} Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.261498 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="573387d335eefd1b7ee3bc96ef51119fa81a806e6d7d35983e77bb5f7580288b" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.261507 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-dmxvk" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.265329 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-6qbzh" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.273214 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fe06-account-create-update-6qbzh" event={"ID":"b242917a-c1a0-4747-a162-d4d13d917682","Type":"ContainerDied","Data":"ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e"} Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.273285 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae3d4cae632a6f5d38e6d15fc1cd65627820fb59ed218ef02244c11b12da431e" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.484001 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="834e2733-f822-45e1-aa2c-bbefe9d0cca7" path="/var/lib/kubelet/pods/834e2733-f822-45e1-aa2c-bbefe9d0cca7/volumes" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.812088 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.819381 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-698r7" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.829802 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.851312 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859169 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts\") pod \"52a647a9-b6cd-434c-a388-25def81293c4\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859221 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wpcv\" (UniqueName: \"kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv\") pod \"7e253c02-bdd1-46d2-b93e-e995108a867b\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859268 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgcfw\" (UniqueName: \"kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw\") pod \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859326 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts\") pod \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\" (UID: \"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859373 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts\") 
pod \"7e253c02-bdd1-46d2-b93e-e995108a867b\" (UID: \"7e253c02-bdd1-46d2-b93e-e995108a867b\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859436 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts\") pod \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859457 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72zzr\" (UniqueName: \"kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr\") pod \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\" (UID: \"21b80b72-ea6e-4983-81b5-f0482f65b8a1\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859507 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx8j4\" (UniqueName: \"kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4\") pod \"52a647a9-b6cd-434c-a388-25def81293c4\" (UID: \"52a647a9-b6cd-434c-a388-25def81293c4\") " Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859836 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52a647a9-b6cd-434c-a388-25def81293c4" (UID: "52a647a9-b6cd-434c-a388-25def81293c4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859895 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e253c02-bdd1-46d2-b93e-e995108a867b" (UID: "7e253c02-bdd1-46d2-b93e-e995108a867b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.859928 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" (UID: "91d21bdd-4ab0-47ed-9a86-cd1473ce90a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.860249 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "21b80b72-ea6e-4983-81b5-f0482f65b8a1" (UID: "21b80b72-ea6e-4983-81b5-f0482f65b8a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.864503 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv" (OuterVolumeSpecName: "kube-api-access-7wpcv") pod "7e253c02-bdd1-46d2-b93e-e995108a867b" (UID: "7e253c02-bdd1-46d2-b93e-e995108a867b"). InnerVolumeSpecName "kube-api-access-7wpcv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.864639 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw" (OuterVolumeSpecName: "kube-api-access-rgcfw") pod "91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" (UID: "91d21bdd-4ab0-47ed-9a86-cd1473ce90a6"). InnerVolumeSpecName "kube-api-access-rgcfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.874597 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4" (OuterVolumeSpecName: "kube-api-access-qx8j4") pod "52a647a9-b6cd-434c-a388-25def81293c4" (UID: "52a647a9-b6cd-434c-a388-25def81293c4"). InnerVolumeSpecName "kube-api-access-qx8j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.883648 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr" (OuterVolumeSpecName: "kube-api-access-72zzr") pod "21b80b72-ea6e-4983-81b5-f0482f65b8a1" (UID: "21b80b72-ea6e-4983-81b5-f0482f65b8a1"). InnerVolumeSpecName "kube-api-access-72zzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961781 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52a647a9-b6cd-434c-a388-25def81293c4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961826 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wpcv\" (UniqueName: \"kubernetes.io/projected/7e253c02-bdd1-46d2-b93e-e995108a867b-kube-api-access-7wpcv\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961840 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgcfw\" (UniqueName: \"kubernetes.io/projected/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-kube-api-access-rgcfw\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961849 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961858 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e253c02-bdd1-46d2-b93e-e995108a867b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961868 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/21b80b72-ea6e-4983-81b5-f0482f65b8a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961877 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72zzr\" (UniqueName: \"kubernetes.io/projected/21b80b72-ea6e-4983-81b5-f0482f65b8a1-kube-api-access-72zzr\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:13 crc kubenswrapper[4797]: I0104 12:08:13.961885 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx8j4\" (UniqueName: 
\"kubernetes.io/projected/52a647a9-b6cd-434c-a388-25def81293c4-kube-api-access-qx8j4\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.278510 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-kv964" event={"ID":"7e253c02-bdd1-46d2-b93e-e995108a867b","Type":"ContainerDied","Data":"95f16c410cfd4495a1fcbbb86b0986aa58d05cd237f4b6fcd838edb492804f22"} Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.278820 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95f16c410cfd4495a1fcbbb86b0986aa58d05cd237f4b6fcd838edb492804f22" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.278934 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0a7e-account-create-update-kv964" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.282030 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"4bd8a0778e29226183846c723b3b1dbfb7b51f65f48a65e13a57c6521dc2d967"} Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.294304 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-698r7" event={"ID":"21b80b72-ea6e-4983-81b5-f0482f65b8a1","Type":"ContainerDied","Data":"273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd"} Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.294515 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="273bd51fa44e9e3e89b2207cb4903b3d942a68909db0905007ed0001bc3d34fd" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.294677 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-698r7" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.313123 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbhjz" event={"ID":"91d21bdd-4ab0-47ed-9a86-cd1473ce90a6","Type":"ContainerDied","Data":"24107a4203ac76ea8745bd189dea7857955af1b21d4b17d84733795047cda637"} Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.313181 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24107a4203ac76ea8745bd189dea7857955af1b21d4b17d84733795047cda637" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.313288 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lbhjz" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.326059 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4447-account-create-update-k6c6m" event={"ID":"52a647a9-b6cd-434c-a388-25def81293c4","Type":"ContainerDied","Data":"df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050"} Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.326101 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df7bed6aab1ab38a76028bb35ee35e6025635a332d4db9b249c0be45f5d9c050" Jan 04 12:08:14 crc kubenswrapper[4797]: I0104 12:08:14.326172 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4447-account-create-update-k6c6m" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.345963 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"9d917f5809cfdb7d3e5560e391457ae4c770a5118b5655d11d4280c6634d5e65"} Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.346026 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"2b82ee45235bd2492b8f0fbdeb7b02c90e80f762e2a643a80221e2da952c67c8"} Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365464 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-2hjqq"] Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365835 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365852 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365865 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21b80b72-ea6e-4983-81b5-f0482f65b8a1" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365873 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="21b80b72-ea6e-4983-81b5-f0482f65b8a1" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365887 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b242917a-c1a0-4747-a162-d4d13d917682" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365893 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b242917a-c1a0-4747-a162-d4d13d917682" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365902 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f1faf93-a80e-424a-b37b-0dc4506c5716" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365908 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f1faf93-a80e-424a-b37b-0dc4506c5716" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365920 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e253c02-bdd1-46d2-b93e-e995108a867b" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365925 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e253c02-bdd1-46d2-b93e-e995108a867b" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: E0104 12:08:15.365945 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a647a9-b6cd-434c-a388-25def81293c4" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.365951 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a647a9-b6cd-434c-a388-25def81293c4" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366111 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="21b80b72-ea6e-4983-81b5-f0482f65b8a1" 
containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366124 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a647a9-b6cd-434c-a388-25def81293c4" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366138 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366146 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b242917a-c1a0-4747-a162-d4d13d917682" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366154 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e253c02-bdd1-46d2-b93e-e995108a867b" containerName="mariadb-account-create-update" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366166 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f1faf93-a80e-424a-b37b-0dc4506c5716" containerName="mariadb-database-create" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.366711 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.370633 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.371043 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-plvd7" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.375010 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2hjqq"] Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.391218 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.391313 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwz2n\" (UniqueName: \"kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.391426 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.391462 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.492373 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.492431 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.492476 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.492552 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwz2n\" (UniqueName: \"kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.497626 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.497662 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.498365 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.512472 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwz2n\" (UniqueName: \"kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n\") pod \"glance-db-sync-2hjqq\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") " pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:15 crc kubenswrapper[4797]: I0104 12:08:15.698219 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-2hjqq" Jan 04 12:08:16 crc kubenswrapper[4797]: W0104 12:08:16.208024 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode64d6948_1c4e_4db6_b739_24b2aba46fd3.slice/crio-f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0 WatchSource:0}: Error finding container f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0: Status 404 returned error can't find the container with id f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0 Jan 04 12:08:16 crc kubenswrapper[4797]: I0104 12:08:16.209523 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2hjqq"] Jan 04 12:08:16 crc kubenswrapper[4797]: I0104 12:08:16.357054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2hjqq" event={"ID":"e64d6948-1c4e-4db6-b739-24b2aba46fd3","Type":"ContainerStarted","Data":"f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0"} Jan 04 12:08:16 crc kubenswrapper[4797]: I0104 12:08:16.360978 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"2706f017a019b29eb56e1869f628c26c7a0403d0ac794f7452caa2d980a884e3"} Jan 04 12:08:16 crc kubenswrapper[4797]: I0104 12:08:16.555302 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 04 12:08:16 crc kubenswrapper[4797]: I0104 12:08:16.832957 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.016733 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-76htz"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.018960 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.035115 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c3c0-account-create-update-846gd"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.036100 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.037605 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.048587 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-76htz"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.089362 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c3c0-account-create-update-846gd"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.119230 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-677t7\" (UniqueName: \"kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.119595 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.119623 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpmcg\" (UniqueName: \"kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.119731 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.140953 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lhm69"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.142496 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.175276 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8d59-account-create-update-jf95g"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.177537 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.180287 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.184961 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lhm69"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.193941 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8d59-account-create-update-jf95g"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.221519 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-677t7\" (UniqueName: \"kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.221589 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.221614 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpmcg\" (UniqueName: \"kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.221963 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ppz7\" (UniqueName: \"kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.222076 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.222116 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.222638 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.222748 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.247632 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpmcg\" (UniqueName: \"kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg\") pod \"barbican-db-create-76htz\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.249570 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-677t7\" (UniqueName: \"kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7\") pod \"cinder-c3c0-account-create-update-846gd\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.288444 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-wsnfd"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.289458 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.293420 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zjjcs" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.293574 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.293701 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.293809 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.297246 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-wsnfd"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.322968 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-976mf\" (UniqueName: \"kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.323030 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ppz7\" (UniqueName: \"kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.323067 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.323086 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.323708 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.337564 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-76htz" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.340454 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ppz7\" (UniqueName: \"kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7\") pod \"cinder-db-create-lhm69\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.361273 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.425488 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.425526 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpshm\" (UniqueName: \"kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.425551 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-976mf\" (UniqueName: \"kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.425571 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.425615 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.426688 
4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.450314 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-976mf\" (UniqueName: \"kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf\") pod \"barbican-8d59-account-create-update-jf95g\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.459620 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rw479"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.460943 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.501829 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.502777 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e050-account-create-update-5j9cp"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.503903 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.505200 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.506552 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.512216 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e050-account-create-update-5j9cp"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.522942 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rw479"] Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.527575 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.527781 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpshm\" (UniqueName: \"kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.527802 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.532828 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.533681 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.546889 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpshm\" (UniqueName: \"kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm\") pod \"keystone-db-sync-wsnfd\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.632372 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.632628 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btxnv\" (UniqueName: \"kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.632737 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.632759 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59bn6\" (UniqueName: \"kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.633041 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.734650 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.734734 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btxnv\" (UniqueName: 
\"kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.734851 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.734870 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59bn6\" (UniqueName: \"kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.735930 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.736666 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.751782 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59bn6\" (UniqueName: \"kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6\") pod \"neutron-e050-account-create-update-5j9cp\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.755525 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btxnv\" (UniqueName: \"kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv\") pod \"neutron-db-create-rw479\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.779446 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rw479" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.806436 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-76htz"] Jan 04 12:08:17 crc kubenswrapper[4797]: W0104 12:08:17.827343 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9d53eda_fceb_4efa_a0fd_86912378be1c.slice/crio-598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e WatchSource:0}: Error finding container 598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e: Status 404 returned error can't find the container with id 598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.829191 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:17 crc kubenswrapper[4797]: I0104 12:08:17.893963 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c3c0-account-create-update-846gd"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.100155 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lhm69"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.215872 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-gc7cq"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.218974 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.223555 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.227239 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-gc7cq"] Jan 04 12:08:18 crc kubenswrapper[4797]: W0104 12:08:18.249303 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddae99fe4_dbd0_47e7_b9ae_53689293573b.slice/crio-c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69 WatchSource:0}: Error finding container c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69: Status 404 returned error can't find the container with id c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69 Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.252038 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8d59-account-create-update-jf95g"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.274634 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-wsnfd"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.349568 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw9pw\" (UniqueName: \"kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.349860 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.383120 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rw479"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.387363 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c3c0-account-create-update-846gd" event={"ID":"d5b103c7-519f-4b21-a5a0-32e656db2dc9","Type":"ContainerStarted","Data":"04143dacb56865e0ec29514b982ee5edd9082a91bbf7f0fdd465db07b6bc6e9c"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.387416 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c3c0-account-create-update-846gd" event={"ID":"d5b103c7-519f-4b21-a5a0-32e656db2dc9","Type":"ContainerStarted","Data":"709aaa076b44fbe4a20a164fc2490a93790ca4ca76de0fe605bce49ae5fc7140"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.409664 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e050-account-create-update-5j9cp"] Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.412665 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-76htz" event={"ID":"a9d53eda-fceb-4efa-a0fd-86912378be1c","Type":"ContainerStarted","Data":"a6a89f4ea5c21966bdee177b3823cf6d0aaad824fafa561c29a66e6bd00694b7"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.412708 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-76htz" event={"ID":"a9d53eda-fceb-4efa-a0fd-86912378be1c","Type":"ContainerStarted","Data":"598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.413022 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-c3c0-account-create-update-846gd" podStartSLOduration=1.412980767 podStartE2EDuration="1.412980767s" podCreationTimestamp="2026-01-04 12:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:18.410398258 +0000 UTC m=+1197.267584967" watchObservedRunningTime="2026-01-04 12:08:18.412980767 +0000 UTC m=+1197.270167466" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.436585 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhm69" event={"ID":"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db","Type":"ContainerStarted","Data":"e72f8d57673f1f31265b970246c597d52e4aa05a77c81c39ae84eb7621a5af98"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.436636 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhm69" event={"ID":"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db","Type":"ContainerStarted","Data":"bf9136f70fd7d38aea65990288322a1026335d3c3b85dcf7018ebc0e063e77a7"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.452121 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d59-account-create-update-jf95g" event={"ID":"dae99fe4-dbd0-47e7-b9ae-53689293573b","Type":"ContainerStarted","Data":"c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.456056 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw9pw\" (UniqueName: 
\"kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.456164 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.458564 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.461822 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-76htz" podStartSLOduration=2.46180451 podStartE2EDuration="2.46180451s" podCreationTimestamp="2026-01-04 12:08:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:18.436866484 +0000 UTC m=+1197.294053193" watchObservedRunningTime="2026-01-04 12:08:18.46180451 +0000 UTC m=+1197.318991219" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.474543 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw9pw\" (UniqueName: \"kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw\") pod \"root-account-create-update-gc7cq\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.487341 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wsnfd" event={"ID":"5c520b8c-14d2-4c61-b791-7cdbd31b2191","Type":"ContainerStarted","Data":"61f46bd2dd818ab2f21d65adf09a09a56d09d8f778ed5099169b3dcc783dfee5"} Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.505099 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-lhm69" podStartSLOduration=1.505064854 podStartE2EDuration="1.505064854s" podCreationTimestamp="2026-01-04 12:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:18.452371348 +0000 UTC m=+1197.309558057" watchObservedRunningTime="2026-01-04 12:08:18.505064854 +0000 UTC m=+1197.362251603" Jan 04 12:08:18 crc kubenswrapper[4797]: I0104 12:08:18.535952 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.496051 4797 generic.go:334] "Generic (PLEG): container finished" podID="d5b103c7-519f-4b21-a5a0-32e656db2dc9" containerID="04143dacb56865e0ec29514b982ee5edd9082a91bbf7f0fdd465db07b6bc6e9c" exitCode=0 Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.496128 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c3c0-account-create-update-846gd" event={"ID":"d5b103c7-519f-4b21-a5a0-32e656db2dc9","Type":"ContainerDied","Data":"04143dacb56865e0ec29514b982ee5edd9082a91bbf7f0fdd465db07b6bc6e9c"} Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.498327 4797 generic.go:334] "Generic (PLEG): container finished" podID="a9d53eda-fceb-4efa-a0fd-86912378be1c" containerID="a6a89f4ea5c21966bdee177b3823cf6d0aaad824fafa561c29a66e6bd00694b7" exitCode=0 Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.498369 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-76htz" event={"ID":"a9d53eda-fceb-4efa-a0fd-86912378be1c","Type":"ContainerDied","Data":"a6a89f4ea5c21966bdee177b3823cf6d0aaad824fafa561c29a66e6bd00694b7"} Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.505777 4797 generic.go:334] "Generic (PLEG): container finished" podID="5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" containerID="e72f8d57673f1f31265b970246c597d52e4aa05a77c81c39ae84eb7621a5af98" exitCode=0 Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.505845 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhm69" event={"ID":"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db","Type":"ContainerDied","Data":"e72f8d57673f1f31265b970246c597d52e4aa05a77c81c39ae84eb7621a5af98"} Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.507114 4797 generic.go:334] "Generic (PLEG): container finished" podID="dae99fe4-dbd0-47e7-b9ae-53689293573b" containerID="79f59fca969a408ad9a7a89258e348f5ad676b2c4c9925be24dca40d1cff80a8" exitCode=0 Jan 04 12:08:19 crc kubenswrapper[4797]: I0104 12:08:19.507144 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d59-account-create-update-jf95g" event={"ID":"dae99fe4-dbd0-47e7-b9ae-53689293573b","Type":"ContainerDied","Data":"79f59fca969a408ad9a7a89258e348f5ad676b2c4c9925be24dca40d1cff80a8"} Jan 04 12:08:20 crc kubenswrapper[4797]: W0104 12:08:20.122869 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda91a3dfc_cd1e_42e4_b40b_c3f18292b523.slice/crio-de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d WatchSource:0}: Error finding container de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d: Status 404 returned error can't find the container with id de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.522594 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"d94828be090a9c92168c4bc7043848fc90bb1e6e64c5c3b90561fa797b2111c6"} Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.524961 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rw479" event={"ID":"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c","Type":"ContainerStarted","Data":"65c274a2548c417a91f46662d6419264a20263e62649a7b5f1180ed7e25e6e5e"} Jan 04 12:08:20 crc kubenswrapper[4797]: 
I0104 12:08:20.525017 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rw479" event={"ID":"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c","Type":"ContainerStarted","Data":"ab062ba74962c3168bd4ed40b45bdcbac568a63b219472495000772ad515b361"} Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.526867 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-5j9cp" event={"ID":"a91a3dfc-cd1e-42e4-b40b-c3f18292b523","Type":"ContainerStarted","Data":"74fc952a13313899a93f6ee90b9e6169f5faee6cabe0dffd0615ff5e9f2d1024"} Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.526903 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-5j9cp" event={"ID":"a91a3dfc-cd1e-42e4-b40b-c3f18292b523","Type":"ContainerStarted","Data":"de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d"} Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.551070 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-rw479" podStartSLOduration=3.551055629 podStartE2EDuration="3.551055629s" podCreationTimestamp="2026-01-04 12:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:20.5503276 +0000 UTC m=+1199.407514309" watchObservedRunningTime="2026-01-04 12:08:20.551055629 +0000 UTC m=+1199.408242338" Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.566295 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-e050-account-create-update-5j9cp" podStartSLOduration=3.566279145 podStartE2EDuration="3.566279145s" podCreationTimestamp="2026-01-04 12:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:20.565211727 +0000 UTC m=+1199.422398436" watchObservedRunningTime="2026-01-04 12:08:20.566279145 +0000 UTC m=+1199.423465854" Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.640843 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-gc7cq"] Jan 04 12:08:20 crc kubenswrapper[4797]: W0104 12:08:20.670674 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod387afd59_3ed2_49d0_92f8_60c250ea32fd.slice/crio-d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d WatchSource:0}: Error finding container d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d: Status 404 returned error can't find the container with id d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d Jan 04 12:08:20 crc kubenswrapper[4797]: I0104 12:08:20.984691 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.029852 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-976mf\" (UniqueName: \"kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf\") pod \"dae99fe4-dbd0-47e7-b9ae-53689293573b\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.046179 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts\") pod \"dae99fe4-dbd0-47e7-b9ae-53689293573b\" (UID: \"dae99fe4-dbd0-47e7-b9ae-53689293573b\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.047570 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dae99fe4-dbd0-47e7-b9ae-53689293573b" (UID: "dae99fe4-dbd0-47e7-b9ae-53689293573b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.054868 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf" (OuterVolumeSpecName: "kube-api-access-976mf") pod "dae99fe4-dbd0-47e7-b9ae-53689293573b" (UID: "dae99fe4-dbd0-47e7-b9ae-53689293573b"). InnerVolumeSpecName "kube-api-access-976mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.088339 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.100802 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.112296 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-76htz" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.150846 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-677t7\" (UniqueName: \"kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7\") pod \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.150900 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts\") pod \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\" (UID: \"d5b103c7-519f-4b21-a5a0-32e656db2dc9\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151046 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts\") pod \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151178 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ppz7\" (UniqueName: \"kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7\") pod \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\" (UID: \"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151628 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d5b103c7-519f-4b21-a5a0-32e656db2dc9" (UID: "d5b103c7-519f-4b21-a5a0-32e656db2dc9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151704 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" (UID: "5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151831 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae99fe4-dbd0-47e7-b9ae-53689293573b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151874 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5b103c7-519f-4b21-a5a0-32e656db2dc9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151887 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.151898 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-976mf\" (UniqueName: \"kubernetes.io/projected/dae99fe4-dbd0-47e7-b9ae-53689293573b-kube-api-access-976mf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.155025 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7" (OuterVolumeSpecName: "kube-api-access-7ppz7") pod "5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" (UID: "5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db"). InnerVolumeSpecName "kube-api-access-7ppz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.165192 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7" (OuterVolumeSpecName: "kube-api-access-677t7") pod "d5b103c7-519f-4b21-a5a0-32e656db2dc9" (UID: "d5b103c7-519f-4b21-a5a0-32e656db2dc9"). InnerVolumeSpecName "kube-api-access-677t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.247863 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2ft9n" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.252587 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts\") pod \"a9d53eda-fceb-4efa-a0fd-86912378be1c\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.252647 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpmcg\" (UniqueName: \"kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg\") pod \"a9d53eda-fceb-4efa-a0fd-86912378be1c\" (UID: \"a9d53eda-fceb-4efa-a0fd-86912378be1c\") " Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.253164 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9d53eda-fceb-4efa-a0fd-86912378be1c" (UID: "a9d53eda-fceb-4efa-a0fd-86912378be1c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.253372 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d53eda-fceb-4efa-a0fd-86912378be1c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.253393 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-677t7\" (UniqueName: \"kubernetes.io/projected/d5b103c7-519f-4b21-a5a0-32e656db2dc9-kube-api-access-677t7\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.253404 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ppz7\" (UniqueName: \"kubernetes.io/projected/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db-kube-api-access-7ppz7\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.258431 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg" (OuterVolumeSpecName: "kube-api-access-mpmcg") pod "a9d53eda-fceb-4efa-a0fd-86912378be1c" (UID: "a9d53eda-fceb-4efa-a0fd-86912378be1c"). InnerVolumeSpecName "kube-api-access-mpmcg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.355516 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpmcg\" (UniqueName: \"kubernetes.io/projected/a9d53eda-fceb-4efa-a0fd-86912378be1c-kube-api-access-mpmcg\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.562956 4797 generic.go:334] "Generic (PLEG): container finished" podID="387afd59-3ed2-49d0-92f8-60c250ea32fd" containerID="c9d82f755fa7d5c278d143cd78882857725e2a0d5842a8e491e85b49ed482d22" exitCode=0 Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.563336 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-gc7cq" event={"ID":"387afd59-3ed2-49d0-92f8-60c250ea32fd","Type":"ContainerDied","Data":"c9d82f755fa7d5c278d143cd78882857725e2a0d5842a8e491e85b49ed482d22"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.563362 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-gc7cq" event={"ID":"387afd59-3ed2-49d0-92f8-60c250ea32fd","Type":"ContainerStarted","Data":"d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.582268 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c3c0-account-create-update-846gd" event={"ID":"d5b103c7-519f-4b21-a5a0-32e656db2dc9","Type":"ContainerDied","Data":"709aaa076b44fbe4a20a164fc2490a93790ca4ca76de0fe605bce49ae5fc7140"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.582307 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c3c0-account-create-update-846gd" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.582316 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="709aaa076b44fbe4a20a164fc2490a93790ca4ca76de0fe605bce49ae5fc7140" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.586542 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-76htz" event={"ID":"a9d53eda-fceb-4efa-a0fd-86912378be1c","Type":"ContainerDied","Data":"598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.586584 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="598c6fcc2c846989b090b33b1f318c8784da0c39170beb4613b5026594913f1e" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.586635 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-76htz" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.590576 4797 generic.go:334] "Generic (PLEG): container finished" podID="070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" containerID="65c274a2548c417a91f46662d6419264a20263e62649a7b5f1180ed7e25e6e5e" exitCode=0 Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.590681 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rw479" event={"ID":"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c","Type":"ContainerDied","Data":"65c274a2548c417a91f46662d6419264a20263e62649a7b5f1180ed7e25e6e5e"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.594926 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhm69" event={"ID":"5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db","Type":"ContainerDied","Data":"bf9136f70fd7d38aea65990288322a1026335d3c3b85dcf7018ebc0e063e77a7"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.594956 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf9136f70fd7d38aea65990288322a1026335d3c3b85dcf7018ebc0e063e77a7" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.595012 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lhm69" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.598159 4797 generic.go:334] "Generic (PLEG): container finished" podID="a91a3dfc-cd1e-42e4-b40b-c3f18292b523" containerID="74fc952a13313899a93f6ee90b9e6169f5faee6cabe0dffd0615ff5e9f2d1024" exitCode=0 Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.598218 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-5j9cp" event={"ID":"a91a3dfc-cd1e-42e4-b40b-c3f18292b523","Type":"ContainerDied","Data":"74fc952a13313899a93f6ee90b9e6169f5faee6cabe0dffd0615ff5e9f2d1024"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.618489 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"4d0acf805d72a971037b409b90ec01a6ebbc5d590e92c591c625d2fa707db99c"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.618533 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"6434cfdff0caacbdc87686c3fccdc177be05f4195a31c1b2f6bbbfe1abc8fc18"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.618543 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"dac8160cf70dc4b847fed968eb4544e73150e080abe8ddac5ed8f69951612687"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.625010 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d59-account-create-update-jf95g" event={"ID":"dae99fe4-dbd0-47e7-b9ae-53689293573b","Type":"ContainerDied","Data":"c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69"} Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.625044 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c45fd04933ad67f9914bce5b663941511cca25ee92e2b6a8fee5513f51d61c69" Jan 04 12:08:21 crc kubenswrapper[4797]: I0104 12:08:21.625087 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8d59-account-create-update-jf95g" Jan 04 12:08:22 crc kubenswrapper[4797]: I0104 12:08:22.644363 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"983ab368fd1a7cc5f480857b432d0a3e9e25e3576b0af3b3338d273ed9c26b17"} Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.159227 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.166448 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rw479" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.169348 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.289805 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts\") pod \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.289899 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btxnv\" (UniqueName: \"kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv\") pod \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.289952 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59bn6\" (UniqueName: \"kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6\") pod \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\" (UID: \"a91a3dfc-cd1e-42e4-b40b-c3f18292b523\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.290022 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts\") pod \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\" (UID: \"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.290071 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw9pw\" (UniqueName: \"kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw\") pod \"387afd59-3ed2-49d0-92f8-60c250ea32fd\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.290090 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts\") pod \"387afd59-3ed2-49d0-92f8-60c250ea32fd\" (UID: \"387afd59-3ed2-49d0-92f8-60c250ea32fd\") " Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.290676 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a91a3dfc-cd1e-42e4-b40b-c3f18292b523" (UID: "a91a3dfc-cd1e-42e4-b40b-c3f18292b523"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.290741 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "387afd59-3ed2-49d0-92f8-60c250ea32fd" (UID: "387afd59-3ed2-49d0-92f8-60c250ea32fd"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.291312 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.291340 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/387afd59-3ed2-49d0-92f8-60c250ea32fd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.291475 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" (UID: "070ef98c-992d-485a-ad7b-bb2ed9bf3f6c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.296094 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw" (OuterVolumeSpecName: "kube-api-access-tw9pw") pod "387afd59-3ed2-49d0-92f8-60c250ea32fd" (UID: "387afd59-3ed2-49d0-92f8-60c250ea32fd"). InnerVolumeSpecName "kube-api-access-tw9pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.296765 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv" (OuterVolumeSpecName: "kube-api-access-btxnv") pod "070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" (UID: "070ef98c-992d-485a-ad7b-bb2ed9bf3f6c"). InnerVolumeSpecName "kube-api-access-btxnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.299539 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6" (OuterVolumeSpecName: "kube-api-access-59bn6") pod "a91a3dfc-cd1e-42e4-b40b-c3f18292b523" (UID: "a91a3dfc-cd1e-42e4-b40b-c3f18292b523"). InnerVolumeSpecName "kube-api-access-59bn6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.393930 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btxnv\" (UniqueName: \"kubernetes.io/projected/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-kube-api-access-btxnv\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.393971 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59bn6\" (UniqueName: \"kubernetes.io/projected/a91a3dfc-cd1e-42e4-b40b-c3f18292b523-kube-api-access-59bn6\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.393999 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.394010 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw9pw\" (UniqueName: \"kubernetes.io/projected/387afd59-3ed2-49d0-92f8-60c250ea32fd-kube-api-access-tw9pw\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.654258 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rw479" event={"ID":"070ef98c-992d-485a-ad7b-bb2ed9bf3f6c","Type":"ContainerDied","Data":"ab062ba74962c3168bd4ed40b45bdcbac568a63b219472495000772ad515b361"} Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.654299 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab062ba74962c3168bd4ed40b45bdcbac568a63b219472495000772ad515b361" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.654387 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rw479" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.655794 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-5j9cp" event={"ID":"a91a3dfc-cd1e-42e4-b40b-c3f18292b523","Type":"ContainerDied","Data":"de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d"} Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.655816 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de388fa693a337b7056207c111ce7051ac1e8a611fde46e5ada9578b4ba54c9d" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.655838 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-5j9cp" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.658206 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-gc7cq" event={"ID":"387afd59-3ed2-49d0-92f8-60c250ea32fd","Type":"ContainerDied","Data":"d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d"} Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.658257 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5e26722fccd35239f0c8bade132672a4d32bbaf0a6d489996cb13b837d97f6d" Jan 04 12:08:23 crc kubenswrapper[4797]: I0104 12:08:23.658928 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-gc7cq" Jan 04 12:08:25 crc kubenswrapper[4797]: I0104 12:08:25.679719 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"2ddf43a128cda79487c737e8b808ccee520d60c4184785332b0d190c687e82c4"} Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.239619 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" probeResult="failure" output=< Jan 04 12:08:26 crc kubenswrapper[4797]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 04 12:08:26 crc kubenswrapper[4797]: > Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.254143 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2ft9n" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459269 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8lx8k-config-49dh8"] Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459755 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae99fe4-dbd0-47e7-b9ae-53689293573b" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459789 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae99fe4-dbd0-47e7-b9ae-53689293573b" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459822 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459834 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459858 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5b103c7-519f-4b21-a5a0-32e656db2dc9" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459873 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5b103c7-519f-4b21-a5a0-32e656db2dc9" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459899 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459911 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459934 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="387afd59-3ed2-49d0-92f8-60c250ea32fd" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459945 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="387afd59-3ed2-49d0-92f8-60c250ea32fd" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.459969 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d53eda-fceb-4efa-a0fd-86912378be1c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.459981 4797 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d53eda-fceb-4efa-a0fd-86912378be1c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: E0104 12:08:26.460037 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a91a3dfc-cd1e-42e4-b40b-c3f18292b523" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460050 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a91a3dfc-cd1e-42e4-b40b-c3f18292b523" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460347 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5b103c7-519f-4b21-a5a0-32e656db2dc9" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460371 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d53eda-fceb-4efa-a0fd-86912378be1c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460391 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460406 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a91a3dfc-cd1e-42e4-b40b-c3f18292b523" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460435 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" containerName="mariadb-database-create" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460461 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="387afd59-3ed2-49d0-92f8-60c250ea32fd" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.460484 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae99fe4-dbd0-47e7-b9ae-53689293573b" containerName="mariadb-account-create-update" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.461312 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.463624 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.470412 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8lx8k-config-49dh8"] Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.546948 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.547014 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.547053 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfscz\" (UniqueName: \"kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.547192 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.547417 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.547575 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649246 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649319 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn\") pod 
\"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649343 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649376 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfscz\" (UniqueName: \"kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649397 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649453 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649601 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649609 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.649624 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.650007 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.652554 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts\") pod 
\"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.678038 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfscz\" (UniqueName: \"kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz\") pod \"ovn-controller-8lx8k-config-49dh8\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:26 crc kubenswrapper[4797]: I0104 12:08:26.785087 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:31 crc kubenswrapper[4797]: I0104 12:08:31.237000 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" probeResult="failure" output=< Jan 04 12:08:31 crc kubenswrapper[4797]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 04 12:08:31 crc kubenswrapper[4797]: > Jan 04 12:08:36 crc kubenswrapper[4797]: I0104 12:08:36.250302 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" probeResult="failure" output=< Jan 04 12:08:36 crc kubenswrapper[4797]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 04 12:08:36 crc kubenswrapper[4797]: > Jan 04 12:08:37 crc kubenswrapper[4797]: E0104 12:08:37.665486 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 04 12:08:37 crc kubenswrapper[4797]: E0104 12:08:37.666578 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pwz2n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-2hjqq_openstack(e64d6948-1c4e-4db6-b739-24b2aba46fd3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:08:37 crc kubenswrapper[4797]: E0104 12:08:37.667887 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-2hjqq" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" Jan 04 12:08:37 crc kubenswrapper[4797]: E0104 12:08:37.797292 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-2hjqq" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.779483 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8lx8k-config-49dh8"] Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.804145 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-49dh8" event={"ID":"a644b240-d941-4061-9111-d2418db22f90","Type":"ContainerStarted","Data":"6a0499b7272a20b1ca436d6d1a3cf0c9c20ddd6ce675362ffdc8f15d1dbe6f0e"} Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.806197 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wsnfd" 
event={"ID":"5c520b8c-14d2-4c61-b791-7cdbd31b2191","Type":"ContainerStarted","Data":"f525d2ac4c5fe5db4a1eec40b42d8797edd3badd36e5653dd0a4e99c191a329c"} Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.818752 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerStarted","Data":"70f1225e794568121e45dff575941e5d35fc27a1defcff2979df065f4ecbaf37"} Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.841742 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-wsnfd" podStartSLOduration=2.409901199 podStartE2EDuration="21.841715738s" podCreationTimestamp="2026-01-04 12:08:17 +0000 UTC" firstStartedPulling="2026-01-04 12:08:18.257259192 +0000 UTC m=+1197.114445901" lastFinishedPulling="2026-01-04 12:08:37.689073701 +0000 UTC m=+1216.546260440" observedRunningTime="2026-01-04 12:08:38.830819887 +0000 UTC m=+1217.688006606" watchObservedRunningTime="2026-01-04 12:08:38.841715738 +0000 UTC m=+1217.698902477" Jan 04 12:08:38 crc kubenswrapper[4797]: I0104 12:08:38.891369 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.36864893 podStartE2EDuration="46.891348073s" podCreationTimestamp="2026-01-04 12:07:52 +0000 UTC" firstStartedPulling="2026-01-04 12:08:09.714103531 +0000 UTC m=+1188.571290240" lastFinishedPulling="2026-01-04 12:08:20.236802674 +0000 UTC m=+1199.093989383" observedRunningTime="2026-01-04 12:08:38.882342692 +0000 UTC m=+1217.739529411" watchObservedRunningTime="2026-01-04 12:08:38.891348073 +0000 UTC m=+1217.748534802" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.173609 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.176671 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.180363 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.216684 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282059 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxfbf\" (UniqueName: \"kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282288 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282421 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282520 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282748 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.282881 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.384907 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.384960 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxfbf\" (UniqueName: \"kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" 
(UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.385013 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.385048 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.385079 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.385165 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.386110 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.386214 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.386262 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.386267 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.386805 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 
12:08:39.403331 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxfbf\" (UniqueName: \"kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf\") pod \"dnsmasq-dns-75bdffd66f-dw28l\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.515389 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:39 crc kubenswrapper[4797]: E0104 12:08:39.543953 4797 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda644b240_d941_4061_9111_d2418db22f90.slice/crio-b2f03c2d81b3bc4e7e06115c067fe0fe16ce918dfcb1eb0ed143a632062a64db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda644b240_d941_4061_9111_d2418db22f90.slice/crio-conmon-b2f03c2d81b3bc4e7e06115c067fe0fe16ce918dfcb1eb0ed143a632062a64db.scope\": RecentStats: unable to find data in memory cache]" Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.826867 4797 generic.go:334] "Generic (PLEG): container finished" podID="a644b240-d941-4061-9111-d2418db22f90" containerID="b2f03c2d81b3bc4e7e06115c067fe0fe16ce918dfcb1eb0ed143a632062a64db" exitCode=0 Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.827082 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-49dh8" event={"ID":"a644b240-d941-4061-9111-d2418db22f90","Type":"ContainerDied","Data":"b2f03c2d81b3bc4e7e06115c067fe0fe16ce918dfcb1eb0ed143a632062a64db"} Jan 04 12:08:39 crc kubenswrapper[4797]: I0104 12:08:39.950676 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:40 crc kubenswrapper[4797]: I0104 12:08:40.834920 4797 generic.go:334] "Generic (PLEG): container finished" podID="5c520b8c-14d2-4c61-b791-7cdbd31b2191" containerID="f525d2ac4c5fe5db4a1eec40b42d8797edd3badd36e5653dd0a4e99c191a329c" exitCode=0 Jan 04 12:08:40 crc kubenswrapper[4797]: I0104 12:08:40.835061 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wsnfd" event={"ID":"5c520b8c-14d2-4c61-b791-7cdbd31b2191","Type":"ContainerDied","Data":"f525d2ac4c5fe5db4a1eec40b42d8797edd3badd36e5653dd0a4e99c191a329c"} Jan 04 12:08:40 crc kubenswrapper[4797]: I0104 12:08:40.837716 4797 generic.go:334] "Generic (PLEG): container finished" podID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerID="2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c" exitCode=0 Jan 04 12:08:40 crc kubenswrapper[4797]: I0104 12:08:40.838156 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" event={"ID":"c8faf8dc-7504-4567-8ec3-046695b655c7","Type":"ContainerDied","Data":"2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c"} Jan 04 12:08:40 crc kubenswrapper[4797]: I0104 12:08:40.838199 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" event={"ID":"c8faf8dc-7504-4567-8ec3-046695b655c7","Type":"ContainerStarted","Data":"97ae316d615c903be2c14fb637585c3ded4ac50ad0877866b1701f4886b22aa7"} Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.231483 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8lx8k" Jan 
04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.286106 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435419 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435529 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435590 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435610 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435625 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435677 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfscz\" (UniqueName: \"kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435708 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts\") pod \"a644b240-d941-4061-9111-d2418db22f90\" (UID: \"a644b240-d941-4061-9111-d2418db22f90\") " Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435749 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run" (OuterVolumeSpecName: "var-run") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.435769 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.436027 4797 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.436044 4797 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.436052 4797 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a644b240-d941-4061-9111-d2418db22f90-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.436553 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.436663 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts" (OuterVolumeSpecName: "scripts") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.440437 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz" (OuterVolumeSpecName: "kube-api-access-nfscz") pod "a644b240-d941-4061-9111-d2418db22f90" (UID: "a644b240-d941-4061-9111-d2418db22f90"). InnerVolumeSpecName "kube-api-access-nfscz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.536947 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.536978 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfscz\" (UniqueName: \"kubernetes.io/projected/a644b240-d941-4061-9111-d2418db22f90-kube-api-access-nfscz\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.537011 4797 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a644b240-d941-4061-9111-d2418db22f90-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.847897 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" event={"ID":"c8faf8dc-7504-4567-8ec3-046695b655c7","Type":"ContainerStarted","Data":"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91"} Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.848321 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.849631 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-49dh8" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.849654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-49dh8" event={"ID":"a644b240-d941-4061-9111-d2418db22f90","Type":"ContainerDied","Data":"6a0499b7272a20b1ca436d6d1a3cf0c9c20ddd6ce675362ffdc8f15d1dbe6f0e"} Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.849703 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a0499b7272a20b1ca436d6d1a3cf0c9c20ddd6ce675362ffdc8f15d1dbe6f0e" Jan 04 12:08:41 crc kubenswrapper[4797]: I0104 12:08:41.885193 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" podStartSLOduration=2.885167278 podStartE2EDuration="2.885167278s" podCreationTimestamp="2026-01-04 12:08:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:41.885146997 +0000 UTC m=+1220.742333706" watchObservedRunningTime="2026-01-04 12:08:41.885167278 +0000 UTC m=+1220.742354067" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.178227 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.350205 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpshm\" (UniqueName: \"kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm\") pod \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.350395 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data\") pod \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.350463 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle\") pod \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\" (UID: \"5c520b8c-14d2-4c61-b791-7cdbd31b2191\") " Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.364519 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm" (OuterVolumeSpecName: "kube-api-access-kpshm") pod "5c520b8c-14d2-4c61-b791-7cdbd31b2191" (UID: "5c520b8c-14d2-4c61-b791-7cdbd31b2191"). InnerVolumeSpecName "kube-api-access-kpshm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.420505 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8lx8k-config-49dh8"] Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.423575 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c520b8c-14d2-4c61-b791-7cdbd31b2191" (UID: "5c520b8c-14d2-4c61-b791-7cdbd31b2191"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.438814 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8lx8k-config-49dh8"] Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.439105 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data" (OuterVolumeSpecName: "config-data") pod "5c520b8c-14d2-4c61-b791-7cdbd31b2191" (UID: "5c520b8c-14d2-4c61-b791-7cdbd31b2191"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.452508 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpshm\" (UniqueName: \"kubernetes.io/projected/5c520b8c-14d2-4c61-b791-7cdbd31b2191-kube-api-access-kpshm\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.452538 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.452547 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c520b8c-14d2-4c61-b791-7cdbd31b2191-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.494743 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8lx8k-config-2chn7"] Jan 04 12:08:42 crc kubenswrapper[4797]: E0104 12:08:42.495166 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a644b240-d941-4061-9111-d2418db22f90" containerName="ovn-config" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.495187 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a644b240-d941-4061-9111-d2418db22f90" containerName="ovn-config" Jan 04 12:08:42 crc kubenswrapper[4797]: E0104 12:08:42.495217 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c520b8c-14d2-4c61-b791-7cdbd31b2191" containerName="keystone-db-sync" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.495226 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c520b8c-14d2-4c61-b791-7cdbd31b2191" containerName="keystone-db-sync" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.495429 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c520b8c-14d2-4c61-b791-7cdbd31b2191" containerName="keystone-db-sync" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.495465 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a644b240-d941-4061-9111-d2418db22f90" containerName="ovn-config" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.496129 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.498108 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.513221 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8lx8k-config-2chn7"] Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670573 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670646 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcxz9\" (UniqueName: \"kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670681 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670729 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670759 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.670829 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772526 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772618 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts\") pod 
\"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772647 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772705 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcxz9\" (UniqueName: \"kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772736 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772772 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772865 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.772872 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.773129 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.773605 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.774785 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts\") pod 
\"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.789478 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcxz9\" (UniqueName: \"kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9\") pod \"ovn-controller-8lx8k-config-2chn7\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.810087 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.862601 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wsnfd" event={"ID":"5c520b8c-14d2-4c61-b791-7cdbd31b2191","Type":"ContainerDied","Data":"61f46bd2dd818ab2f21d65adf09a09a56d09d8f778ed5099169b3dcc783dfee5"} Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.862641 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61f46bd2dd818ab2f21d65adf09a09a56d09d8f778ed5099169b3dcc783dfee5" Jan 04 12:08:42 crc kubenswrapper[4797]: I0104 12:08:42.862643 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-wsnfd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.044902 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.072883 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.074180 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.089653 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.161340 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-md6zk"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.162503 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.165593 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.165663 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.165832 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.166216 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zjjcs" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.168732 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.169952 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-md6zk"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180111 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180182 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180215 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvzjp\" (UniqueName: \"kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180248 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180303 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.180365 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.281857 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282092 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282182 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282319 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282424 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282523 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282604 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282670 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvzjp\" (UniqueName: \"kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282749 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl5hd\" (UniqueName: \"kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282815 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282879 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282946 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.282693 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.283238 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.283829 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.283940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.284518 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.319051 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvzjp\" (UniqueName: \"kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp\") pod \"dnsmasq-dns-77bbd879b9-shcpd\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.329809 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovn-controller-8lx8k-config-2chn7"] Jan 04 12:08:43 crc kubenswrapper[4797]: W0104 12:08:43.335054 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode897aaa5_6e8a_469b_94b8_51bbc5545aea.slice/crio-0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053 WatchSource:0}: Error finding container 0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053: Status 404 returned error can't find the container with id 0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053 Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.377795 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.379594 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.382653 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.382835 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386775 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386847 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386887 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386915 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl5hd\" (UniqueName: \"kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386934 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.386950 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 
12:08:43.396384 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.397303 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.398958 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.399689 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.400176 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.407038 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.409736 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.424545 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl5hd\" (UniqueName: \"kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd\") pod \"keystone-bootstrap-md6zk\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.455217 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7dbzx"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.457904 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.464525 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.466013 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wxlrf" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.471941 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493814 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493883 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493919 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6c8w\" (UniqueName: \"kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493938 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493957 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.493998 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.494020 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.500227 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a644b240-d941-4061-9111-d2418db22f90" path="/var/lib/kubelet/pods/a644b240-d941-4061-9111-d2418db22f90/volumes" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.500848 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/cinder-db-sync-7dbzx"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.508094 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.551039 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-qc984"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.552135 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.572057 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-qc984"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.580116 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4q6pn"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.580677 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-grbjg" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.580758 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.581377 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.596599 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4q6pn"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601731 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601762 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601848 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601876 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601889 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601905 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-vc96w\" (UniqueName: \"kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601925 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.601968 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.602136 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-257gm" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.609017 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.616150 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.619137 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6c8w\" (UniqueName: \"kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.619177 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.619226 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.619253 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.619307 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.624592 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.631609 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.638301 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.639139 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.656672 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.660327 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.668885 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.672430 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6c8w\" (UniqueName: \"kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w\") pod \"ceilometer-0\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.705570 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-lnngq"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.708279 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.718661 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.718836 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-45b86" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.718861 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.722801 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.722855 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.722902 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.722934 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.722971 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.723000 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc96w\" (UniqueName: \"kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724270 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724302 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdtl9\" (UniqueName: \"kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9\") pod \"neutron-db-sync-4q6pn\" 
(UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724336 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724360 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd7zz\" (UniqueName: \"kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724464 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.724481 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.728275 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.728790 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.742388 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lnngq"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.744175 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.750490 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.750570 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data\") pod \"cinder-db-sync-7dbzx\" (UID: 
\"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.772162 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.778632 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc96w\" (UniqueName: \"kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w\") pod \"cinder-db-sync-7dbzx\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.778951 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.784284 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825515 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825551 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd7zz\" (UniqueName: \"kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825604 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825627 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825646 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825665 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825758 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle\") pod \"neutron-db-sync-4q6pn\" 
(UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825781 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnvq9\" (UniqueName: \"kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825833 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825864 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.825902 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdtl9\" (UniqueName: \"kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.832650 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.838964 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.867470 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.880659 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdtl9\" (UniqueName: \"kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.881490 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.883309 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle\") pod \"neutron-db-sync-4q6pn\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.883414 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.884773 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd7zz\" (UniqueName: \"kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz\") pod \"barbican-db-sync-qc984\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.916420 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-qc984" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.917392 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-2chn7" event={"ID":"e897aaa5-6e8a-469b-94b8-51bbc5545aea","Type":"ContainerStarted","Data":"0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053"} Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.917449 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="dnsmasq-dns" containerID="cri-o://7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91" gracePeriod=10 Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.927611 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnvq9\" (UniqueName: \"kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930033 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930121 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bq9p\" (UniqueName: \"kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930241 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: 
\"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930275 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930291 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930398 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930418 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930455 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930483 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.930516 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.931683 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.933684 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc 
kubenswrapper[4797]: I0104 12:08:43.934840 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.938930 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.954376 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnvq9\" (UniqueName: \"kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9\") pod \"placement-db-sync-lnngq\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:43 crc kubenswrapper[4797]: I0104 12:08:43.981343 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032468 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bq9p\" (UniqueName: \"kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032537 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032570 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032596 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032666 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.032703 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " 
pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.033742 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.033785 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.034249 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.034370 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.034512 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.054856 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bq9p\" (UniqueName: \"kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p\") pod \"dnsmasq-dns-8495b76777-nfspv\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.120221 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lnngq" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.152023 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.286582 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.335567 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-md6zk"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.537715 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.612532 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.713862 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7dbzx"] Jan 04 12:08:44 crc kubenswrapper[4797]: W0104 12:08:44.725456 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65e365e6_5912_434a_a269_85dc5254dcba.slice/crio-20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4 WatchSource:0}: Error finding container 20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4: Status 404 returned error can't find the container with id 20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749057 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxfbf\" (UniqueName: \"kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749150 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749178 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749217 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749242 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.749322 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc\") pod \"c8faf8dc-7504-4567-8ec3-046695b655c7\" (UID: \"c8faf8dc-7504-4567-8ec3-046695b655c7\") " Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.761284 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf" (OuterVolumeSpecName: "kube-api-access-fxfbf") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "kube-api-access-fxfbf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.805935 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.806742 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.815446 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config" (OuterVolumeSpecName: "config") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.819606 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.835465 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4q6pn"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.839101 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c8faf8dc-7504-4567-8ec3-046695b655c7" (UID: "c8faf8dc-7504-4567-8ec3-046695b655c7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.845481 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lnngq"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850748 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxfbf\" (UniqueName: \"kubernetes.io/projected/c8faf8dc-7504-4567-8ec3-046695b655c7-kube-api-access-fxfbf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850772 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850783 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850792 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850802 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.850811 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8faf8dc-7504-4567-8ec3-046695b655c7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:44 crc kubenswrapper[4797]: W0104 12:08:44.851687 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8575d68_d47c_4e79_a81f_0690139b672f.slice/crio-0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866 WatchSource:0}: Error finding container 0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866: Status 404 returned error can't find the container with id 0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.854410 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-qc984"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.942459 4797 generic.go:334] "Generic (PLEG): container finished" podID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerID="7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91" exitCode=0 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.942517 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.942560 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" event={"ID":"c8faf8dc-7504-4567-8ec3-046695b655c7","Type":"ContainerDied","Data":"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.942588 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bdffd66f-dw28l" event={"ID":"c8faf8dc-7504-4567-8ec3-046695b655c7","Type":"ContainerDied","Data":"97ae316d615c903be2c14fb637585c3ded4ac50ad0877866b1701f4886b22aa7"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.942604 4797 scope.go:117] "RemoveContainer" containerID="7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.951028 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:08:44 crc kubenswrapper[4797]: W0104 12:08:44.957044 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8221293a_5cd8_4cca_b221_b30d78f8c2aa.slice/crio-a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84 WatchSource:0}: Error finding container a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84: Status 404 returned error can't find the container with id a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.959047 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-md6zk" event={"ID":"8f531bc4-862f-441f-af33-388d5e26451b","Type":"ContainerStarted","Data":"6096170da8024bd185ccbb449e4c936cf1c7b826fb7c745391b53796803d1ba7"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.959082 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-md6zk" event={"ID":"8f531bc4-862f-441f-af33-388d5e26451b","Type":"ContainerStarted","Data":"54bdfb19dfd338f54b8224ee0fbc085a8ea514ac0b68a5080fd44790fa048903"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.962045 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lnngq" event={"ID":"e763a131-879e-4bfd-8138-2d3da9195289","Type":"ContainerStarted","Data":"309ba9344840164f369952815b82449f0e1aed96b049210a1ac0777d4373a26d"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.963141 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4q6pn" event={"ID":"724d4392-4880-4e98-b78e-676b080c32cc","Type":"ContainerStarted","Data":"5eff6855f990f6b0f6ad10b10eda099f99bad4aaf509b476130ed2ab5cc7f78a"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.966774 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerStarted","Data":"f937747a89dac243fd500323fa05bb5744617676dac1dc71d2e584bb1f3073ed"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.970346 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7dbzx" event={"ID":"65e365e6-5912-434a-a269-85dc5254dcba","Type":"ContainerStarted","Data":"20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.972478 4797 generic.go:334] "Generic (PLEG): container finished" 
podID="e897aaa5-6e8a-469b-94b8-51bbc5545aea" containerID="10f06d60e44f21d7b74181cb279dbd06be17a5955d943a61e439c5ecc67b13b1" exitCode=0 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.972672 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-2chn7" event={"ID":"e897aaa5-6e8a-469b-94b8-51bbc5545aea","Type":"ContainerDied","Data":"10f06d60e44f21d7b74181cb279dbd06be17a5955d943a61e439c5ecc67b13b1"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.974070 4797 generic.go:334] "Generic (PLEG): container finished" podID="26ed8234-1301-4057-941f-7111456d4e8a" containerID="c9d5684fe4d0cb46a8c93efc77dcee480a15077e3bc12034dd2b186f3af3753d" exitCode=0 Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.974125 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" event={"ID":"26ed8234-1301-4057-941f-7111456d4e8a","Type":"ContainerDied","Data":"c9d5684fe4d0cb46a8c93efc77dcee480a15077e3bc12034dd2b186f3af3753d"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.974150 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" event={"ID":"26ed8234-1301-4057-941f-7111456d4e8a","Type":"ContainerStarted","Data":"12dae24fac3d6ca31c7f13450e1ef7e10c3f315889141a1243894cb13342dc41"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.977394 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.978667 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qc984" event={"ID":"a8575d68-d47c-4e79-a81f-0690139b672f","Type":"ContainerStarted","Data":"0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866"} Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.983059 4797 scope.go:117] "RemoveContainer" containerID="2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c" Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.983510 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75bdffd66f-dw28l"] Jan 04 12:08:44 crc kubenswrapper[4797]: I0104 12:08:44.989427 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-md6zk" podStartSLOduration=1.98940997 podStartE2EDuration="1.98940997s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:44.984381196 +0000 UTC m=+1223.841567905" watchObservedRunningTime="2026-01-04 12:08:44.98940997 +0000 UTC m=+1223.846596679" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.022632 4797 scope.go:117] "RemoveContainer" containerID="7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91" Jan 04 12:08:45 crc kubenswrapper[4797]: E0104 12:08:45.032699 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91\": container with ID starting with 7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91 not found: ID does not exist" containerID="7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.032740 4797 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91"} err="failed to get container status \"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91\": rpc error: code = NotFound desc = could not find container \"7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91\": container with ID starting with 7831286979ffb80f3ac8bc162af37dd8817ecad383f04757ae5af980e4b12a91 not found: ID does not exist" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.032767 4797 scope.go:117] "RemoveContainer" containerID="2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c" Jan 04 12:08:45 crc kubenswrapper[4797]: E0104 12:08:45.033752 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c\": container with ID starting with 2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c not found: ID does not exist" containerID="2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.033776 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c"} err="failed to get container status \"2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c\": rpc error: code = NotFound desc = could not find container \"2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c\": container with ID starting with 2794170f1315d30b95e3cf40e312c6a484b2d57272ef1a6dbb0723416429143c not found: ID does not exist" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.457740 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.499563 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" path="/var/lib/kubelet/pods/c8faf8dc-7504-4567-8ec3-046695b655c7/volumes" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562329 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562455 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562484 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562557 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562583 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvzjp\" (UniqueName: \"kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.562635 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb\") pod \"26ed8234-1301-4057-941f-7111456d4e8a\" (UID: \"26ed8234-1301-4057-941f-7111456d4e8a\") " Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.600216 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.605510 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp" (OuterVolumeSpecName: "kube-api-access-cvzjp") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "kube-api-access-cvzjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.611143 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.617684 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config" (OuterVolumeSpecName: "config") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.618863 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.630629 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "26ed8234-1301-4057-941f-7111456d4e8a" (UID: "26ed8234-1301-4057-941f-7111456d4e8a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.670979 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.671043 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.671056 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.671067 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.671081 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ed8234-1301-4057-941f-7111456d4e8a-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.671094 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvzjp\" (UniqueName: \"kubernetes.io/projected/26ed8234-1301-4057-941f-7111456d4e8a-kube-api-access-cvzjp\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:45 crc kubenswrapper[4797]: I0104 12:08:45.713180 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.035256 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" event={"ID":"26ed8234-1301-4057-941f-7111456d4e8a","Type":"ContainerDied","Data":"12dae24fac3d6ca31c7f13450e1ef7e10c3f315889141a1243894cb13342dc41"} Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.035306 4797 scope.go:117] "RemoveContainer" containerID="c9d5684fe4d0cb46a8c93efc77dcee480a15077e3bc12034dd2b186f3af3753d" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.035408 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bbd879b9-shcpd" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.071291 4797 generic.go:334] "Generic (PLEG): container finished" podID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerID="3d32769fbc3a11b048b5223b877e645906938f8e70f5f45524f14c66a9518161" exitCode=0 Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.072460 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-nfspv" event={"ID":"8221293a-5cd8-4cca-b221-b30d78f8c2aa","Type":"ContainerDied","Data":"3d32769fbc3a11b048b5223b877e645906938f8e70f5f45524f14c66a9518161"} Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.072491 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-nfspv" event={"ID":"8221293a-5cd8-4cca-b221-b30d78f8c2aa","Type":"ContainerStarted","Data":"a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84"} Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.093594 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4q6pn" event={"ID":"724d4392-4880-4e98-b78e-676b080c32cc","Type":"ContainerStarted","Data":"02ec9f1e17cb5f669f96a93b638752b2ab4a42af575488b118e2ccb8752faf9a"} Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.120772 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.134394 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77bbd879b9-shcpd"] Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.174060 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4q6pn" podStartSLOduration=3.17403659 podStartE2EDuration="3.17403659s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:46.159164474 +0000 UTC m=+1225.016351183" watchObservedRunningTime="2026-01-04 12:08:46.17403659 +0000 UTC m=+1225.031223289" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.530878 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589586 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcxz9\" (UniqueName: \"kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589617 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589670 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589694 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.589786 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts\") pod \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\" (UID: \"e897aaa5-6e8a-469b-94b8-51bbc5545aea\") " Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.590617 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.590650 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run" (OuterVolumeSpecName: "var-run") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.590956 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.591369 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.591540 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts" (OuterVolumeSpecName: "scripts") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.594853 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9" (OuterVolumeSpecName: "kube-api-access-rcxz9") pod "e897aaa5-6e8a-469b-94b8-51bbc5545aea" (UID: "e897aaa5-6e8a-469b-94b8-51bbc5545aea"). InnerVolumeSpecName "kube-api-access-rcxz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692459 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692513 4797 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692530 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcxz9\" (UniqueName: \"kubernetes.io/projected/e897aaa5-6e8a-469b-94b8-51bbc5545aea-kube-api-access-rcxz9\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692552 4797 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692563 4797 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e897aaa5-6e8a-469b-94b8-51bbc5545aea-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:46 crc kubenswrapper[4797]: I0104 12:08:46.692573 4797 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e897aaa5-6e8a-469b-94b8-51bbc5545aea-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.122791 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-nfspv" event={"ID":"8221293a-5cd8-4cca-b221-b30d78f8c2aa","Type":"ContainerStarted","Data":"2f71e905c6bd19bf3b24560c7b2912b9eb5d025c169894224f61b0bd8ce66f2e"} Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.125051 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k-config-2chn7" Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.125763 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k-config-2chn7" event={"ID":"e897aaa5-6e8a-469b-94b8-51bbc5545aea","Type":"ContainerDied","Data":"0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053"} Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.125787 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b7519f8be2bea3271cdf88cb85ee9a86253727a5a6ca9b95e51ed6bfa561053" Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.484548 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26ed8234-1301-4057-941f-7111456d4e8a" path="/var/lib/kubelet/pods/26ed8234-1301-4057-941f-7111456d4e8a/volumes" Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.618290 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8lx8k-config-2chn7"] Jan 04 12:08:47 crc kubenswrapper[4797]: I0104 12:08:47.629586 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8lx8k-config-2chn7"] Jan 04 12:08:48 crc kubenswrapper[4797]: I0104 12:08:48.137307 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:48 crc kubenswrapper[4797]: I0104 12:08:48.156874 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8495b76777-nfspv" podStartSLOduration=5.156856559 podStartE2EDuration="5.156856559s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:48.151974138 +0000 UTC m=+1227.009160857" watchObservedRunningTime="2026-01-04 12:08:48.156856559 +0000 UTC m=+1227.014043268" Jan 04 12:08:49 crc kubenswrapper[4797]: I0104 12:08:49.152145 4797 generic.go:334] "Generic (PLEG): container finished" podID="8f531bc4-862f-441f-af33-388d5e26451b" containerID="6096170da8024bd185ccbb449e4c936cf1c7b826fb7c745391b53796803d1ba7" exitCode=0 Jan 04 12:08:49 crc kubenswrapper[4797]: I0104 12:08:49.152202 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-md6zk" event={"ID":"8f531bc4-862f-441f-af33-388d5e26451b","Type":"ContainerDied","Data":"6096170da8024bd185ccbb449e4c936cf1c7b826fb7c745391b53796803d1ba7"} Jan 04 12:08:49 crc kubenswrapper[4797]: I0104 12:08:49.486224 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e897aaa5-6e8a-469b-94b8-51bbc5545aea" path="/var/lib/kubelet/pods/e897aaa5-6e8a-469b-94b8-51bbc5545aea/volumes" Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.842905 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987680 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987755 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl5hd\" (UniqueName: \"kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987798 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987828 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987879 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.987998 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle\") pod \"8f531bc4-862f-441f-af33-388d5e26451b\" (UID: \"8f531bc4-862f-441f-af33-388d5e26451b\") " Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.994094 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.995079 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts" (OuterVolumeSpecName: "scripts") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:51 crc kubenswrapper[4797]: I0104 12:08:51.999044 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd" (OuterVolumeSpecName: "kube-api-access-rl5hd") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "kube-api-access-rl5hd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.006457 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.018805 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.023227 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data" (OuterVolumeSpecName: "config-data") pod "8f531bc4-862f-441f-af33-388d5e26451b" (UID: "8f531bc4-862f-441f-af33-388d5e26451b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091149 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091185 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl5hd\" (UniqueName: \"kubernetes.io/projected/8f531bc4-862f-441f-af33-388d5e26451b-kube-api-access-rl5hd\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091199 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091210 4797 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091219 4797 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.091231 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f531bc4-862f-441f-af33-388d5e26451b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.177707 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-md6zk" event={"ID":"8f531bc4-862f-441f-af33-388d5e26451b","Type":"ContainerDied","Data":"54bdfb19dfd338f54b8224ee0fbc085a8ea514ac0b68a5080fd44790fa048903"} Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.177940 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54bdfb19dfd338f54b8224ee0fbc085a8ea514ac0b68a5080fd44790fa048903" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.177765 4797 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-md6zk" Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.931155 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-md6zk"] Jan 04 12:08:52 crc kubenswrapper[4797]: I0104 12:08:52.939848 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-md6zk"] Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.058101 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7tpfs"] Jan 04 12:08:53 crc kubenswrapper[4797]: E0104 12:08:53.058815 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e897aaa5-6e8a-469b-94b8-51bbc5545aea" containerName="ovn-config" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.058855 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e897aaa5-6e8a-469b-94b8-51bbc5545aea" containerName="ovn-config" Jan 04 12:08:53 crc kubenswrapper[4797]: E0104 12:08:53.058884 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="dnsmasq-dns" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.058898 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="dnsmasq-dns" Jan 04 12:08:53 crc kubenswrapper[4797]: E0104 12:08:53.058921 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f531bc4-862f-441f-af33-388d5e26451b" containerName="keystone-bootstrap" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.058936 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f531bc4-862f-441f-af33-388d5e26451b" containerName="keystone-bootstrap" Jan 04 12:08:53 crc kubenswrapper[4797]: E0104 12:08:53.058974 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26ed8234-1301-4057-941f-7111456d4e8a" containerName="init" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.059021 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="26ed8234-1301-4057-941f-7111456d4e8a" containerName="init" Jan 04 12:08:53 crc kubenswrapper[4797]: E0104 12:08:53.059068 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="init" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.059081 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="init" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.060301 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e897aaa5-6e8a-469b-94b8-51bbc5545aea" containerName="ovn-config" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.060355 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8faf8dc-7504-4567-8ec3-046695b655c7" containerName="dnsmasq-dns" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.060378 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="26ed8234-1301-4057-941f-7111456d4e8a" containerName="init" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.060402 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f531bc4-862f-441f-af33-388d5e26451b" containerName="keystone-bootstrap" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.061405 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.069297 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7tpfs"] Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.071164 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.071619 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.071853 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.072133 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.075034 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zjjcs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.110514 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.111026 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.111112 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7dnl\" (UniqueName: \"kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.111153 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.111307 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.111417 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213381 4797 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213525 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213564 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7dnl\" (UniqueName: \"kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213592 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213638 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.213688 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.218315 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.218402 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.221582 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.221673 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys\") pod \"keystone-bootstrap-7tpfs\" (UID: 
\"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.222518 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.244954 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7dnl\" (UniqueName: \"kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl\") pod \"keystone-bootstrap-7tpfs\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.393770 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:08:53 crc kubenswrapper[4797]: I0104 12:08:53.490390 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f531bc4-862f-441f-af33-388d5e26451b" path="/var/lib/kubelet/pods/8f531bc4-862f-441f-af33-388d5e26451b/volumes" Jan 04 12:08:54 crc kubenswrapper[4797]: I0104 12:08:54.154683 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:08:54 crc kubenswrapper[4797]: I0104 12:08:54.241915 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"] Jan 04 12:08:54 crc kubenswrapper[4797]: I0104 12:08:54.242160 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" containerID="cri-o://44067c99ea4de70545c8a94b5364d81bb441239ca0aaa302aa476fd8d4cfce0a" gracePeriod=10 Jan 04 12:08:55 crc kubenswrapper[4797]: I0104 12:08:55.211798 4797 generic.go:334] "Generic (PLEG): container finished" podID="1650ba5a-ccad-4447-ad19-02185249e682" containerID="44067c99ea4de70545c8a94b5364d81bb441239ca0aaa302aa476fd8d4cfce0a" exitCode=0 Jan 04 12:08:55 crc kubenswrapper[4797]: I0104 12:08:55.211864 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" event={"ID":"1650ba5a-ccad-4447-ad19-02185249e682","Type":"ContainerDied","Data":"44067c99ea4de70545c8a94b5364d81bb441239ca0aaa302aa476fd8d4cfce0a"} Jan 04 12:09:02 crc kubenswrapper[4797]: I0104 12:09:02.283159 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Jan 04 12:09:07 crc kubenswrapper[4797]: I0104 12:09:07.284105 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Jan 04 12:09:08 crc kubenswrapper[4797]: E0104 12:09:08.614253 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 04 12:09:08 crc kubenswrapper[4797]: E0104 12:09:08.616251 4797 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pwz2n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-2hjqq_openstack(e64d6948-1c4e-4db6-b739-24b2aba46fd3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:09:08 crc kubenswrapper[4797]: E0104 12:09:08.618295 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-2hjqq" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.725139 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.850245 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config\") pod \"1650ba5a-ccad-4447-ad19-02185249e682\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.850332 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb\") pod \"1650ba5a-ccad-4447-ad19-02185249e682\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.850417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb\") pod \"1650ba5a-ccad-4447-ad19-02185249e682\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.850523 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc\") pod \"1650ba5a-ccad-4447-ad19-02185249e682\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.850567 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25pmf\" (UniqueName: \"kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf\") pod \"1650ba5a-ccad-4447-ad19-02185249e682\" (UID: \"1650ba5a-ccad-4447-ad19-02185249e682\") " Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.867293 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf" (OuterVolumeSpecName: "kube-api-access-25pmf") pod "1650ba5a-ccad-4447-ad19-02185249e682" (UID: "1650ba5a-ccad-4447-ad19-02185249e682"). InnerVolumeSpecName "kube-api-access-25pmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.891586 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config" (OuterVolumeSpecName: "config") pod "1650ba5a-ccad-4447-ad19-02185249e682" (UID: "1650ba5a-ccad-4447-ad19-02185249e682"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.898554 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1650ba5a-ccad-4447-ad19-02185249e682" (UID: "1650ba5a-ccad-4447-ad19-02185249e682"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.910358 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1650ba5a-ccad-4447-ad19-02185249e682" (UID: "1650ba5a-ccad-4447-ad19-02185249e682"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.922011 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1650ba5a-ccad-4447-ad19-02185249e682" (UID: "1650ba5a-ccad-4447-ad19-02185249e682"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.953226 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.953472 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.953485 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.953496 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25pmf\" (UniqueName: \"kubernetes.io/projected/1650ba5a-ccad-4447-ad19-02185249e682-kube-api-access-25pmf\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:08 crc kubenswrapper[4797]: I0104 12:09:08.953508 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1650ba5a-ccad-4447-ad19-02185249e682-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:09 crc kubenswrapper[4797]: E0104 12:09:09.204964 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16" Jan 04 12:09:09 crc kubenswrapper[4797]: E0104 12:09:09.205216 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bd7zz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-qc984_openstack(a8575d68-d47c-4e79-a81f-0690139b672f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:09:09 crc kubenswrapper[4797]: E0104 12:09:09.206853 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-qc984" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.344467 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.354344 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" event={"ID":"1650ba5a-ccad-4447-ad19-02185249e682","Type":"ContainerDied","Data":"2546e481a10f8178a92fd4f30b6aab587bc2d364e117e99ff9c89100bfe85cbe"} Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.354395 4797 scope.go:117] "RemoveContainer" containerID="44067c99ea4de70545c8a94b5364d81bb441239ca0aaa302aa476fd8d4cfce0a" Jan 04 12:09:09 crc kubenswrapper[4797]: E0104 12:09:09.355469 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16\\\"\"" pod="openstack/barbican-db-sync-qc984" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.407484 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"] Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.414321 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-k9xdt"] Jan 04 12:09:09 crc kubenswrapper[4797]: I0104 12:09:09.502453 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1650ba5a-ccad-4447-ad19-02185249e682" path="/var/lib/kubelet/pods/1650ba5a-ccad-4447-ad19-02185249e682/volumes" Jan 04 12:09:10 crc kubenswrapper[4797]: I0104 12:09:10.363054 4797 scope.go:117] "RemoveContainer" containerID="de1d4658c4876b69c123dad8d47fa379445823451e9792186447d7758b569734" Jan 04 12:09:10 crc kubenswrapper[4797]: E0104 12:09:10.381727 4797 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Jan 04 12:09:10 crc kubenswrapper[4797]: E0104 12:09:10.382502 4797 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vc96w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-7dbzx_openstack(65e365e6-5912-434a-a269-85dc5254dcba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:09:10 crc kubenswrapper[4797]: E0104 12:09:10.387000 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-7dbzx" podUID="65e365e6-5912-434a-a269-85dc5254dcba" Jan 04 12:09:10 crc kubenswrapper[4797]: I0104 12:09:10.795733 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7tpfs"] Jan 04 12:09:11 crc kubenswrapper[4797]: I0104 12:09:11.372162 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lnngq" event={"ID":"e763a131-879e-4bfd-8138-2d3da9195289","Type":"ContainerStarted","Data":"8b6da3f0a29ef4c778dc8fb64cc5a3f05fdd14f45969bc51a6a39c1ee76a3e94"} Jan 04 12:09:11 crc kubenswrapper[4797]: I0104 12:09:11.376139 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7tpfs" event={"ID":"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4","Type":"ContainerStarted","Data":"31c529a7d82420563a356ef0183a956a8b578eb086f36f052c1f35e27b1aed25"} Jan 04 12:09:11 crc 
kubenswrapper[4797]: I0104 12:09:11.376190 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7tpfs" event={"ID":"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4","Type":"ContainerStarted","Data":"f5529dd0ae72323f3a41ef22d99f0985f2c8d9cc0e4bf8c4bff4b94e31604305"} Jan 04 12:09:11 crc kubenswrapper[4797]: I0104 12:09:11.381572 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerStarted","Data":"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e"} Jan 04 12:09:11 crc kubenswrapper[4797]: E0104 12:09:11.384644 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-7dbzx" podUID="65e365e6-5912-434a-a269-85dc5254dcba" Jan 04 12:09:11 crc kubenswrapper[4797]: I0104 12:09:11.403309 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-lnngq" podStartSLOduration=2.891359841 podStartE2EDuration="28.403291144s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="2026-01-04 12:08:44.851277194 +0000 UTC m=+1223.708463903" lastFinishedPulling="2026-01-04 12:09:10.363208507 +0000 UTC m=+1249.220395206" observedRunningTime="2026-01-04 12:09:11.399307779 +0000 UTC m=+1250.256494488" watchObservedRunningTime="2026-01-04 12:09:11.403291144 +0000 UTC m=+1250.260477843" Jan 04 12:09:11 crc kubenswrapper[4797]: I0104 12:09:11.434010 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7tpfs" podStartSLOduration=18.433976402 podStartE2EDuration="18.433976402s" podCreationTimestamp="2026-01-04 12:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:11.426452684 +0000 UTC m=+1250.283639393" watchObservedRunningTime="2026-01-04 12:09:11.433976402 +0000 UTC m=+1250.291163111" Jan 04 12:09:12 crc kubenswrapper[4797]: I0104 12:09:12.285252 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-k9xdt" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Jan 04 12:09:12 crc kubenswrapper[4797]: I0104 12:09:12.392219 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerStarted","Data":"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae"} Jan 04 12:09:13 crc kubenswrapper[4797]: I0104 12:09:13.402723 4797 generic.go:334] "Generic (PLEG): container finished" podID="e763a131-879e-4bfd-8138-2d3da9195289" containerID="8b6da3f0a29ef4c778dc8fb64cc5a3f05fdd14f45969bc51a6a39c1ee76a3e94" exitCode=0 Jan 04 12:09:13 crc kubenswrapper[4797]: I0104 12:09:13.403108 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lnngq" event={"ID":"e763a131-879e-4bfd-8138-2d3da9195289","Type":"ContainerDied","Data":"8b6da3f0a29ef4c778dc8fb64cc5a3f05fdd14f45969bc51a6a39c1ee76a3e94"} Jan 04 12:09:14 crc kubenswrapper[4797]: I0104 12:09:14.412257 4797 generic.go:334] "Generic (PLEG): container finished" 
podID="4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" containerID="31c529a7d82420563a356ef0183a956a8b578eb086f36f052c1f35e27b1aed25" exitCode=0 Jan 04 12:09:14 crc kubenswrapper[4797]: I0104 12:09:14.412666 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7tpfs" event={"ID":"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4","Type":"ContainerDied","Data":"31c529a7d82420563a356ef0183a956a8b578eb086f36f052c1f35e27b1aed25"} Jan 04 12:09:15 crc kubenswrapper[4797]: I0104 12:09:15.420716 4797 generic.go:334] "Generic (PLEG): container finished" podID="724d4392-4880-4e98-b78e-676b080c32cc" containerID="02ec9f1e17cb5f669f96a93b638752b2ab4a42af575488b118e2ccb8752faf9a" exitCode=0 Jan 04 12:09:15 crc kubenswrapper[4797]: I0104 12:09:15.420853 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4q6pn" event={"ID":"724d4392-4880-4e98-b78e-676b080c32cc","Type":"ContainerDied","Data":"02ec9f1e17cb5f669f96a93b638752b2ab4a42af575488b118e2ccb8752faf9a"} Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.193360 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.201419 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lnngq" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.245146 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300628 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config\") pod \"724d4392-4880-4e98-b78e-676b080c32cc\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300675 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300733 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300777 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle\") pod \"e763a131-879e-4bfd-8138-2d3da9195289\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300832 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300868 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnvq9\" (UniqueName: 
\"kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9\") pod \"e763a131-879e-4bfd-8138-2d3da9195289\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300930 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7dnl\" (UniqueName: \"kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300961 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle\") pod \"724d4392-4880-4e98-b78e-676b080c32cc\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.300977 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdtl9\" (UniqueName: \"kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9\") pod \"724d4392-4880-4e98-b78e-676b080c32cc\" (UID: \"724d4392-4880-4e98-b78e-676b080c32cc\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.301031 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.301089 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts\") pod \"e763a131-879e-4bfd-8138-2d3da9195289\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.301113 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data\") pod \"e763a131-879e-4bfd-8138-2d3da9195289\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.301162 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data\") pod \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\" (UID: \"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.301179 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs\") pod \"e763a131-879e-4bfd-8138-2d3da9195289\" (UID: \"e763a131-879e-4bfd-8138-2d3da9195289\") " Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.302105 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs" (OuterVolumeSpecName: "logs") pod "e763a131-879e-4bfd-8138-2d3da9195289" (UID: "e763a131-879e-4bfd-8138-2d3da9195289"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.309035 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl" (OuterVolumeSpecName: "kube-api-access-l7dnl") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "kube-api-access-l7dnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.310033 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.311238 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.313022 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts" (OuterVolumeSpecName: "scripts") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.313826 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts" (OuterVolumeSpecName: "scripts") pod "e763a131-879e-4bfd-8138-2d3da9195289" (UID: "e763a131-879e-4bfd-8138-2d3da9195289"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.316418 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9" (OuterVolumeSpecName: "kube-api-access-rdtl9") pod "724d4392-4880-4e98-b78e-676b080c32cc" (UID: "724d4392-4880-4e98-b78e-676b080c32cc"). InnerVolumeSpecName "kube-api-access-rdtl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.319556 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9" (OuterVolumeSpecName: "kube-api-access-cnvq9") pod "e763a131-879e-4bfd-8138-2d3da9195289" (UID: "e763a131-879e-4bfd-8138-2d3da9195289"). InnerVolumeSpecName "kube-api-access-cnvq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.337836 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e763a131-879e-4bfd-8138-2d3da9195289" (UID: "e763a131-879e-4bfd-8138-2d3da9195289"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.338779 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data" (OuterVolumeSpecName: "config-data") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.339366 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config" (OuterVolumeSpecName: "config") pod "724d4392-4880-4e98-b78e-676b080c32cc" (UID: "724d4392-4880-4e98-b78e-676b080c32cc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.345172 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" (UID: "4eb4366c-79ee-4bc5-9434-8bf5134c0fe4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.356695 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data" (OuterVolumeSpecName: "config-data") pod "e763a131-879e-4bfd-8138-2d3da9195289" (UID: "e763a131-879e-4bfd-8138-2d3da9195289"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.356974 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "724d4392-4880-4e98-b78e-676b080c32cc" (UID: "724d4392-4880-4e98-b78e-676b080c32cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403871 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403916 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403926 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403935 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e763a131-879e-4bfd-8138-2d3da9195289-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403943 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403951 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403961 4797 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403969 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e763a131-879e-4bfd-8138-2d3da9195289-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.403983 4797 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.404018 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnvq9\" (UniqueName: \"kubernetes.io/projected/e763a131-879e-4bfd-8138-2d3da9195289-kube-api-access-cnvq9\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.404030 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7dnl\" (UniqueName: \"kubernetes.io/projected/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-kube-api-access-l7dnl\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.404040 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724d4392-4880-4e98-b78e-676b080c32cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.404048 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdtl9\" (UniqueName: \"kubernetes.io/projected/724d4392-4880-4e98-b78e-676b080c32cc-kube-api-access-rdtl9\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: 
I0104 12:09:17.404055 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.437329 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4q6pn" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.437332 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4q6pn" event={"ID":"724d4392-4880-4e98-b78e-676b080c32cc","Type":"ContainerDied","Data":"5eff6855f990f6b0f6ad10b10eda099f99bad4aaf509b476130ed2ab5cc7f78a"} Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.437661 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5eff6855f990f6b0f6ad10b10eda099f99bad4aaf509b476130ed2ab5cc7f78a" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.439129 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lnngq" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.439125 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lnngq" event={"ID":"e763a131-879e-4bfd-8138-2d3da9195289","Type":"ContainerDied","Data":"309ba9344840164f369952815b82449f0e1aed96b049210a1ac0777d4373a26d"} Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.439169 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="309ba9344840164f369952815b82449f0e1aed96b049210a1ac0777d4373a26d" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.441054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7tpfs" event={"ID":"4eb4366c-79ee-4bc5-9434-8bf5134c0fe4","Type":"ContainerDied","Data":"f5529dd0ae72323f3a41ef22d99f0985f2c8d9cc0e4bf8c4bff4b94e31604305"} Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.441086 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5529dd0ae72323f3a41ef22d99f0985f2c8d9cc0e4bf8c4bff4b94e31604305" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.441139 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7tpfs" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.446024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerStarted","Data":"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258"} Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.681110 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:09:17 crc kubenswrapper[4797]: E0104 12:09:17.683548 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="init" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.683629 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="init" Jan 04 12:09:17 crc kubenswrapper[4797]: E0104 12:09:17.683693 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e763a131-879e-4bfd-8138-2d3da9195289" containerName="placement-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.683764 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e763a131-879e-4bfd-8138-2d3da9195289" containerName="placement-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: E0104 12:09:17.683827 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" containerName="keystone-bootstrap" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.683874 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" containerName="keystone-bootstrap" Jan 04 12:09:17 crc kubenswrapper[4797]: E0104 12:09:17.683927 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="724d4392-4880-4e98-b78e-676b080c32cc" containerName="neutron-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.683978 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="724d4392-4880-4e98-b78e-676b080c32cc" containerName="neutron-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: E0104 12:09:17.684058 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.684105 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.684308 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e763a131-879e-4bfd-8138-2d3da9195289" containerName="placement-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.684367 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" containerName="keystone-bootstrap" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.684423 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="724d4392-4880-4e98-b78e-676b080c32cc" containerName="neutron-db-sync" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.684497 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1650ba5a-ccad-4447-ad19-02185249e682" containerName="dnsmasq-dns" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.685337 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.704467 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814409 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlqxq\" (UniqueName: \"kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814472 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814498 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814518 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814582 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.814605 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.829428 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.830694 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.838512 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.839748 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-257gm" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.842101 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.842255 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.856052 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.916337 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.916433 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.916504 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.917324 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.917366 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.917932 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.918070 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 
crc kubenswrapper[4797]: I0104 12:09:17.918191 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.918228 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.918349 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.918495 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9l7l\" (UniqueName: \"kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.918547 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.919302 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.919366 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.920129 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlqxq\" (UniqueName: \"kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.920182 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 
12:09:17 crc kubenswrapper[4797]: I0104 12:09:17.935780 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlqxq\" (UniqueName: \"kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq\") pod \"dnsmasq-dns-75b8bc4b57-m8l7k\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.008784 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.021187 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9l7l\" (UniqueName: \"kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.021291 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.021324 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.021344 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.021374 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.025860 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.025887 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.026794 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " 
pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.029510 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.044154 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9l7l\" (UniqueName: \"kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l\") pod \"neutron-569f46ffb4-b7hph\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.152117 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.341696 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"] Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.342952 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.349135 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.349288 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.349379 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.349653 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.349904 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.350126 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zjjcs" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.365661 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.378726 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.384563 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.384802 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-45b86" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.384973 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.385082 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"] Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.385167 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.385282 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.393124 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430129 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430182 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430216 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430243 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430274 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430301 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs\") pod 
\"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430322 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430341 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430359 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430390 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430414 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430435 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430461 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpzws\" (UniqueName: \"kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430478 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc7sx\" (UniqueName: \"kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.430492 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.474680 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532116 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc7sx\" (UniqueName: \"kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532173 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532272 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532297 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532327 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532357 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532407 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532439 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532463 4797 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532487 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532520 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532574 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532627 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532658 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.532704 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpzws\" (UniqueName: \"kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.537001 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.539033 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.552635 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle\") pod 
\"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.553235 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.553495 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.553946 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.554238 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.555127 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.555474 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.555644 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.557656 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.557680 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.558880 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mpzws\" (UniqueName: \"kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.559822 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc7sx\" (UniqueName: \"kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx\") pod \"keystone-7d545b89c4-j2jcg\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.561769 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs\") pod \"placement-596bb9655b-hsz7j\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") " pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.692223 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:18 crc kubenswrapper[4797]: I0104 12:09:18.713894 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.035391 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.161209 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"] Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.270431 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:09:19 crc kubenswrapper[4797]: W0104 12:09:19.287216 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf904f7de_5407_4427_a82c_e31b26195c0a.slice/crio-9fd443c8b12b79f5170075de4e6b779c43b6cf2d22c4dadf5b76a0ae32eab5ca WatchSource:0}: Error finding container 9fd443c8b12b79f5170075de4e6b779c43b6cf2d22c4dadf5b76a0ae32eab5ca: Status 404 returned error can't find the container with id 9fd443c8b12b79f5170075de4e6b779c43b6cf2d22c4dadf5b76a0ae32eab5ca Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.460178 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d545b89c4-j2jcg" event={"ID":"5d40c323-3444-4e84-8eb3-799d343c384d","Type":"ContainerStarted","Data":"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.460228 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d545b89c4-j2jcg" event={"ID":"5d40c323-3444-4e84-8eb3-799d343c384d","Type":"ContainerStarted","Data":"718348ab379f9dde9db5d5973a417cbdca056f788e7795a70a4572145188e422"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.461107 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.465351 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" 
event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerStarted","Data":"39cd871a2cdd33c13df6a2d820e492302dd31e6705fb1d98b69638efa2161416"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.465371 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerStarted","Data":"626d7d88529d7b29071edfac1503cc650c4455e600bac509ae6256dac0d5c888"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.469006 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerStarted","Data":"9fd443c8b12b79f5170075de4e6b779c43b6cf2d22c4dadf5b76a0ae32eab5ca"} Jan 04 12:09:19 crc kubenswrapper[4797]: E0104 12:09:19.482706 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-2hjqq" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.486559 4797 generic.go:334] "Generic (PLEG): container finished" podID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerID="321419da87541bdfc25ccdaa780c8344bc2abebbdd0d71e2ab565d0208150fcb" exitCode=0 Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.486594 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" event={"ID":"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f","Type":"ContainerDied","Data":"321419da87541bdfc25ccdaa780c8344bc2abebbdd0d71e2ab565d0208150fcb"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.486612 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" event={"ID":"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f","Type":"ContainerStarted","Data":"0b7ef079ccfcc5dd325cec9d362a79f807610051a5e39d6e4a48339e282a7713"} Jan 04 12:09:19 crc kubenswrapper[4797]: I0104 12:09:19.504712 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7d545b89c4-j2jcg" podStartSLOduration=1.504691073 podStartE2EDuration="1.504691073s" podCreationTimestamp="2026-01-04 12:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:19.483145405 +0000 UTC m=+1258.340332134" watchObservedRunningTime="2026-01-04 12:09:19.504691073 +0000 UTC m=+1258.361877792" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.499576 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerStarted","Data":"b6e4f4c9fc24c93108e20d30e67e1eacd46dba1b62cf51fbfafb10e51011ab52"} Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.499964 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.506726 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerStarted","Data":"92b755c8a3261041cdba8ae7ff1475f229ccf920b10c9c7daf3c52db8f65c7c8"} Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.506769 4797 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerStarted","Data":"b36b3a17559f71b1c9cfe74bbb4df7e910711b7ff1898a03c491af436d97b4ee"} Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.506785 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.506805 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.512568 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" event={"ID":"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f","Type":"ContainerStarted","Data":"768398bf394f51dda85b437f4cbfc44778f062e261e56f2697dc1c9e7ba28eca"} Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.512728 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.524119 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-569f46ffb4-b7hph" podStartSLOduration=3.524100965 podStartE2EDuration="3.524100965s" podCreationTimestamp="2026-01-04 12:09:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:20.515806746 +0000 UTC m=+1259.372993455" watchObservedRunningTime="2026-01-04 12:09:20.524100965 +0000 UTC m=+1259.381287674" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.543175 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" podStartSLOduration=3.543157827 podStartE2EDuration="3.543157827s" podCreationTimestamp="2026-01-04 12:09:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:20.537439896 +0000 UTC m=+1259.394626605" watchObservedRunningTime="2026-01-04 12:09:20.543157827 +0000 UTC m=+1259.400344536" Jan 04 12:09:20 crc kubenswrapper[4797]: I0104 12:09:20.576906 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-596bb9655b-hsz7j" podStartSLOduration=2.576878495 podStartE2EDuration="2.576878495s" podCreationTimestamp="2026-01-04 12:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:20.563214495 +0000 UTC m=+1259.420401204" watchObservedRunningTime="2026-01-04 12:09:20.576878495 +0000 UTC m=+1259.434065204" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.134392 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"] Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.135938 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.140368 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.140821 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183451 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183549 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183594 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183624 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183646 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183676 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhqp2\" (UniqueName: \"kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.183697 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.187604 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"] Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285726 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhqp2\" (UniqueName: 
\"kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285775 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285821 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285873 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285908 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285940 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.285962 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.293756 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.294260 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.294473 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " 
pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.294665 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.295801 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.298490 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.306562 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhqp2\" (UniqueName: \"kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2\") pod \"neutron-56b94d8bbf-ng2pk\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") " pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:21 crc kubenswrapper[4797]: I0104 12:09:21.470196 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:22 crc kubenswrapper[4797]: I0104 12:09:22.266313 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"] Jan 04 12:09:22 crc kubenswrapper[4797]: I0104 12:09:22.532607 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerStarted","Data":"3382c8748f8373d8ffc03485bc6663d561e539d92ffa3d5602aefd3623704534"} Jan 04 12:09:23 crc kubenswrapper[4797]: I0104 12:09:23.543514 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerStarted","Data":"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"} Jan 04 12:09:23 crc kubenswrapper[4797]: I0104 12:09:23.543734 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerStarted","Data":"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"} Jan 04 12:09:23 crc kubenswrapper[4797]: I0104 12:09:23.544150 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:23 crc kubenswrapper[4797]: I0104 12:09:23.572547 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-56b94d8bbf-ng2pk" podStartSLOduration=2.57252971 podStartE2EDuration="2.57252971s" podCreationTimestamp="2026-01-04 12:09:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:23.560324778 +0000 UTC m=+1262.417511487" watchObservedRunningTime="2026-01-04 12:09:23.57252971 +0000 UTC m=+1262.429716419" Jan 
04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.010654 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.071224 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.071486 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8495b76777-nfspv" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="dnsmasq-dns" containerID="cri-o://2f71e905c6bd19bf3b24560c7b2912b9eb5d025c169894224f61b0bd8ce66f2e" gracePeriod=10 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.628544 4797 generic.go:334] "Generic (PLEG): container finished" podID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerID="2f71e905c6bd19bf3b24560c7b2912b9eb5d025c169894224f61b0bd8ce66f2e" exitCode=0 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.628654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-nfspv" event={"ID":"8221293a-5cd8-4cca-b221-b30d78f8c2aa","Type":"ContainerDied","Data":"2f71e905c6bd19bf3b24560c7b2912b9eb5d025c169894224f61b0bd8ce66f2e"} Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.628944 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8495b76777-nfspv" event={"ID":"8221293a-5cd8-4cca-b221-b30d78f8c2aa","Type":"ContainerDied","Data":"a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84"} Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.628961 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a581528daa455111534b19f8d373e8d42b4729d70c1a8fe3cb3d57934e837c84" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658312 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerStarted","Data":"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf"} Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658475 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-central-agent" containerID="cri-o://ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e" gracePeriod=30 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658728 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658756 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="proxy-httpd" containerID="cri-o://1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf" gracePeriod=30 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658808 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-notification-agent" containerID="cri-o://fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae" gracePeriod=30 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.658841 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" 
containerName="sg-core" containerID="cri-o://3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258" gracePeriod=30 Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.676444 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qc984" event={"ID":"a8575d68-d47c-4e79-a81f-0690139b672f","Type":"ContainerStarted","Data":"e8f552014f76bc33708fe1e5bfac2986ac1ddb10ea417de60dbeeffb11f80a95"} Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.696373 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.106732541 podStartE2EDuration="45.696353422s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="2026-01-04 12:08:44.570220694 +0000 UTC m=+1223.427407403" lastFinishedPulling="2026-01-04 12:09:28.159841575 +0000 UTC m=+1267.017028284" observedRunningTime="2026-01-04 12:09:28.694830192 +0000 UTC m=+1267.552016901" watchObservedRunningTime="2026-01-04 12:09:28.696353422 +0000 UTC m=+1267.553540131" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.702625 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.721057 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-qc984" podStartSLOduration=2.4152722669999998 podStartE2EDuration="45.721039422s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="2026-01-04 12:08:44.854365997 +0000 UTC m=+1223.711552706" lastFinishedPulling="2026-01-04 12:09:28.160133142 +0000 UTC m=+1267.017319861" observedRunningTime="2026-01-04 12:09:28.714216322 +0000 UTC m=+1267.571403021" watchObservedRunningTime="2026-01-04 12:09:28.721039422 +0000 UTC m=+1267.578226121" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763160 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb\") pod \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763328 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config\") pod \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763353 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb\") pod \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763397 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0\") pod \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc\") pod 
\"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.763502 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bq9p\" (UniqueName: \"kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p\") pod \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\" (UID: \"8221293a-5cd8-4cca-b221-b30d78f8c2aa\") " Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.775708 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p" (OuterVolumeSpecName: "kube-api-access-8bq9p") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "kube-api-access-8bq9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.830788 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.838447 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config" (OuterVolumeSpecName: "config") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.840216 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.844167 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.854097 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8221293a-5cd8-4cca-b221-b30d78f8c2aa" (UID: "8221293a-5cd8-4cca-b221-b30d78f8c2aa"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865361 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865402 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865415 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865427 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865439 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bq9p\" (UniqueName: \"kubernetes.io/projected/8221293a-5cd8-4cca-b221-b30d78f8c2aa-kube-api-access-8bq9p\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:28 crc kubenswrapper[4797]: I0104 12:09:28.865451 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8221293a-5cd8-4cca-b221-b30d78f8c2aa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.687872 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7dbzx" event={"ID":"65e365e6-5912-434a-a269-85dc5254dcba","Type":"ContainerStarted","Data":"4de439556b1a7f8f1a32a117874611f6001902f3c668cb6709d4d266700b3fee"} Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692336 4797 generic.go:334] "Generic (PLEG): container finished" podID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerID="1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf" exitCode=0 Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692389 4797 generic.go:334] "Generic (PLEG): container finished" podID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerID="3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258" exitCode=2 Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692400 4797 generic.go:334] "Generic (PLEG): container finished" podID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerID="ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e" exitCode=0 Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692429 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerDied","Data":"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf"} Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692474 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8495b76777-nfspv" Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692541 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerDied","Data":"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258"} Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.692564 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerDied","Data":"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e"} Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.719610 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7dbzx" podStartSLOduration=3.2966413 podStartE2EDuration="46.719586343s" podCreationTimestamp="2026-01-04 12:08:43 +0000 UTC" firstStartedPulling="2026-01-04 12:08:44.73533817 +0000 UTC m=+1223.592524879" lastFinishedPulling="2026-01-04 12:09:28.158283213 +0000 UTC m=+1267.015469922" observedRunningTime="2026-01-04 12:09:29.708948343 +0000 UTC m=+1268.566135072" watchObservedRunningTime="2026-01-04 12:09:29.719586343 +0000 UTC m=+1268.576773052" Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.745406 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:09:29 crc kubenswrapper[4797]: I0104 12:09:29.752303 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8495b76777-nfspv"] Jan 04 12:09:31 crc kubenswrapper[4797]: I0104 12:09:31.487588 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" path="/var/lib/kubelet/pods/8221293a-5cd8-4cca-b221-b30d78f8c2aa/volumes" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.621333 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.792429 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.792608 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6c8w\" (UniqueName: \"kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.792676 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.792736 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.793346 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.793426 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.793477 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.793653 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd\") pod \"80cfd955-bb6d-41cc-b2e3-148836ced610\" (UID: \"80cfd955-bb6d-41cc-b2e3-148836ced610\") " Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.794414 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.794592 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.794621 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80cfd955-bb6d-41cc-b2e3-148836ced610-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.797365 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts" (OuterVolumeSpecName: "scripts") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.797719 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w" (OuterVolumeSpecName: "kube-api-access-n6c8w") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "kube-api-access-n6c8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.838082 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.862130 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.877523 4797 generic.go:334] "Generic (PLEG): container finished" podID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerID="fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae" exitCode=0 Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.877805 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.879029 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerDied","Data":"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae"} Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.879076 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80cfd955-bb6d-41cc-b2e3-148836ced610","Type":"ContainerDied","Data":"f937747a89dac243fd500323fa05bb5744617676dac1dc71d2e584bb1f3073ed"} Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.879226 4797 scope.go:117] "RemoveContainer" containerID="1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.886409 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2hjqq" event={"ID":"e64d6948-1c4e-4db6-b739-24b2aba46fd3","Type":"ContainerStarted","Data":"cda8ef357d1d23977cac785ecf306bc2c2c9ad45f4365b9a32302df789764ffb"} Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.891682 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data" (OuterVolumeSpecName: "config-data") pod "80cfd955-bb6d-41cc-b2e3-148836ced610" (UID: "80cfd955-bb6d-41cc-b2e3-148836ced610"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.895428 4797 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.895461 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6c8w\" (UniqueName: \"kubernetes.io/projected/80cfd955-bb6d-41cc-b2e3-148836ced610-kube-api-access-n6c8w\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.895474 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.895486 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.895496 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cfd955-bb6d-41cc-b2e3-148836ced610-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.944903 4797 scope.go:117] "RemoveContainer" containerID="3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.965735 4797 scope.go:117] "RemoveContainer" containerID="fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.982458 4797 scope.go:117] "RemoveContainer" containerID="ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.998546 4797 scope.go:117] "RemoveContainer" 
containerID="1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf" Jan 04 12:09:32 crc kubenswrapper[4797]: E0104 12:09:32.998959 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf\": container with ID starting with 1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf not found: ID does not exist" containerID="1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.999015 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf"} err="failed to get container status \"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf\": rpc error: code = NotFound desc = could not find container \"1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf\": container with ID starting with 1974f9dd566378f0f5413885bd956d73b7f05954065568f48e92dec79dc617cf not found: ID does not exist" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.999042 4797 scope.go:117] "RemoveContainer" containerID="3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258" Jan 04 12:09:32 crc kubenswrapper[4797]: E0104 12:09:32.999370 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258\": container with ID starting with 3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258 not found: ID does not exist" containerID="3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.999443 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258"} err="failed to get container status \"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258\": rpc error: code = NotFound desc = could not find container \"3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258\": container with ID starting with 3396003f5354c930b06e8b4d03900530637ef398ccc6550ebb77b973c28d0258 not found: ID does not exist" Jan 04 12:09:32 crc kubenswrapper[4797]: I0104 12:09:32.999475 4797 scope.go:117] "RemoveContainer" containerID="fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:32.999950 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae\": container with ID starting with fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae not found: ID does not exist" containerID="fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.000024 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae"} err="failed to get container status \"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae\": rpc error: code = NotFound desc = could not find container \"fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae\": container with ID starting with 
fcda8974e0be1c6c814685a0b984ae30b29ead11841fd128f3513379c61c4eae not found: ID does not exist" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.000058 4797 scope.go:117] "RemoveContainer" containerID="ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.000333 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e\": container with ID starting with ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e not found: ID does not exist" containerID="ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.000353 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e"} err="failed to get container status \"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e\": rpc error: code = NotFound desc = could not find container \"ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e\": container with ID starting with ecb0b04e3806114b200fa7c864d16debe6ec633d08903edc92afbe2e5ef8e81e not found: ID does not exist" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.232568 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-2hjqq" podStartSLOduration=2.425738158 podStartE2EDuration="1m18.232530518s" podCreationTimestamp="2026-01-04 12:08:15 +0000 UTC" firstStartedPulling="2026-01-04 12:08:16.210357844 +0000 UTC m=+1195.067544563" lastFinishedPulling="2026-01-04 12:09:32.017150204 +0000 UTC m=+1270.874336923" observedRunningTime="2026-01-04 12:09:32.90355334 +0000 UTC m=+1271.760740059" watchObservedRunningTime="2026-01-04 12:09:33.232530518 +0000 UTC m=+1272.089717267" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.239090 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.253130 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.276860 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277591 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="proxy-httpd" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277649 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="proxy-httpd" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277699 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="init" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277721 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="init" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277783 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-notification-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277805 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" 
containerName="ceilometer-notification-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277855 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="dnsmasq-dns" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277887 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="dnsmasq-dns" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277911 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="sg-core" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277928 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="sg-core" Jan 04 12:09:33 crc kubenswrapper[4797]: E0104 12:09:33.277959 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-central-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.277978 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-central-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.278399 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="sg-core" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.278481 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8221293a-5cd8-4cca-b221-b30d78f8c2aa" containerName="dnsmasq-dns" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.278542 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-notification-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.278627 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="ceilometer-central-agent" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.278671 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" containerName="proxy-httpd" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.282741 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.285274 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.285566 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.291056 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403429 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd5g7\" (UniqueName: \"kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403479 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403519 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403541 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403577 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403629 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.403650 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.492173 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80cfd955-bb6d-41cc-b2e3-148836ced610" path="/var/lib/kubelet/pods/80cfd955-bb6d-41cc-b2e3-148836ced610/volumes" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505627 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505707 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505803 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd5g7\" (UniqueName: \"kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505849 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505892 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.505930 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.506067 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.507587 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.507611 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.512226 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.512314 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.523740 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.526636 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.536443 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd5g7\" (UniqueName: \"kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7\") pod \"ceilometer-0\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " pod="openstack/ceilometer-0" Jan 04 12:09:33 crc kubenswrapper[4797]: I0104 12:09:33.601015 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:34 crc kubenswrapper[4797]: I0104 12:09:34.098127 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:34 crc kubenswrapper[4797]: W0104 12:09:34.108972 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef8e3a41_8586_4918_b62a_0e1b49eab563.slice/crio-432d603f2c60fc65a6d0bd18d71fa0ab03a4078476c0d6dddb9419629aa44da3 WatchSource:0}: Error finding container 432d603f2c60fc65a6d0bd18d71fa0ab03a4078476c0d6dddb9419629aa44da3: Status 404 returned error can't find the container with id 432d603f2c60fc65a6d0bd18d71fa0ab03a4078476c0d6dddb9419629aa44da3 Jan 04 12:09:34 crc kubenswrapper[4797]: I0104 12:09:34.907898 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerStarted","Data":"432d603f2c60fc65a6d0bd18d71fa0ab03a4078476c0d6dddb9419629aa44da3"} Jan 04 12:09:35 crc kubenswrapper[4797]: I0104 12:09:35.919430 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerStarted","Data":"1f780000cda1f9e5d097d614a4f6b4fc2b3a9e1ee1d4fb67355fe56852e25e6e"} Jan 04 12:09:35 crc kubenswrapper[4797]: I0104 12:09:35.919892 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerStarted","Data":"6494e67ca81405b9e8f0d2cdc8b4a8992457dd364e1e91cec429b0d77957ad15"} Jan 04 12:09:35 crc kubenswrapper[4797]: I0104 12:09:35.921554 4797 generic.go:334] "Generic (PLEG): container finished" podID="a8575d68-d47c-4e79-a81f-0690139b672f" containerID="e8f552014f76bc33708fe1e5bfac2986ac1ddb10ea417de60dbeeffb11f80a95" exitCode=0 Jan 04 12:09:35 crc kubenswrapper[4797]: I0104 12:09:35.921585 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qc984" event={"ID":"a8575d68-d47c-4e79-a81f-0690139b672f","Type":"ContainerDied","Data":"e8f552014f76bc33708fe1e5bfac2986ac1ddb10ea417de60dbeeffb11f80a95"} Jan 04 12:09:36 
crc kubenswrapper[4797]: I0104 12:09:36.932597 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerStarted","Data":"da45682938b825cad4ca44b6d16c43030f0be18a5ad1272afb8461dde57df4a1"} Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.376264 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-qc984" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.394690 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle\") pod \"a8575d68-d47c-4e79-a81f-0690139b672f\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.394775 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data\") pod \"a8575d68-d47c-4e79-a81f-0690139b672f\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.394827 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd7zz\" (UniqueName: \"kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz\") pod \"a8575d68-d47c-4e79-a81f-0690139b672f\" (UID: \"a8575d68-d47c-4e79-a81f-0690139b672f\") " Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.405080 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a8575d68-d47c-4e79-a81f-0690139b672f" (UID: "a8575d68-d47c-4e79-a81f-0690139b672f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.418301 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz" (OuterVolumeSpecName: "kube-api-access-bd7zz") pod "a8575d68-d47c-4e79-a81f-0690139b672f" (UID: "a8575d68-d47c-4e79-a81f-0690139b672f"). InnerVolumeSpecName "kube-api-access-bd7zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.427909 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8575d68-d47c-4e79-a81f-0690139b672f" (UID: "a8575d68-d47c-4e79-a81f-0690139b672f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.496200 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.496241 4797 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8575d68-d47c-4e79-a81f-0690139b672f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.496259 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd7zz\" (UniqueName: \"kubernetes.io/projected/a8575d68-d47c-4e79-a81f-0690139b672f-kube-api-access-bd7zz\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.941234 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qc984" event={"ID":"a8575d68-d47c-4e79-a81f-0690139b672f","Type":"ContainerDied","Data":"0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866"} Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.941289 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d0d81c785b2ba7c071f5de8633ecff0202d0c17457598eb70324f9cddbf4866" Jan 04 12:09:37 crc kubenswrapper[4797]: I0104 12:09:37.941371 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-qc984" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.369505 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:09:38 crc kubenswrapper[4797]: E0104 12:09:38.370108 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" containerName="barbican-db-sync" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.370125 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" containerName="barbican-db-sync" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.370295 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" containerName="barbican-db-sync" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.371195 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.376166 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-grbjg" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.376198 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.376234 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.383609 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.393047 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.400556 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.400612 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.419174 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.490726 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.492082 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.500662 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511148 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511191 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511211 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks9cx\" (UniqueName: \"kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511240 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511266 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511289 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data\") pod 
\"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511318 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511355 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511371 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.511417 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-596vd\" (UniqueName: \"kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.574030 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.575667 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.579537 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.584780 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"] Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612633 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612674 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612698 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612715 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks9cx\" (UniqueName: \"kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612742 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612777 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.612804 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613028 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613257 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613332 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613373 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613408 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613476 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613504 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7slc\" (UniqueName: \"kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613534 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-596vd\" (UniqueName: \"kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613562 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.613954 4797 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.614408 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.617664 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.624895 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.624929 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.629127 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks9cx\" (UniqueName: \"kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.630437 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.634088 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-596vd\" (UniqueName: \"kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.634316 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data\") pod \"barbican-worker-6bfdff7977-xqk4d\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.656920 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffb4dbc44-wqxfk\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.688961 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.712533 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.714786 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwh6l\" (UniqueName: \"kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.714868 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.714903 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.714943 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715012 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715045 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715071 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7slc\" (UniqueName: \"kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715098 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715144 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715170 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715203 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.715749 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.716012 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.716348 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.716433 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.716583 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.735932 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b7slc\" (UniqueName: \"kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc\") pod \"dnsmasq-dns-74477f67b9-khz9p\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") " pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.815960 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816072 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816159 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816192 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816232 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwh6l\" (UniqueName: \"kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816350 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.816726 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.826802 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.829529 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.837478 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwh6l\" (UniqueName: \"kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.837497 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom\") pod \"barbican-api-5dc4bd8cbd-x28kr\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.915493 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:38 crc kubenswrapper[4797]: I0104 12:09:38.980969 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.008504 4797 generic.go:334] "Generic (PLEG): container finished" podID="65e365e6-5912-434a-a269-85dc5254dcba" containerID="4de439556b1a7f8f1a32a117874611f6001902f3c668cb6709d4d266700b3fee" exitCode=0 Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.008588 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7dbzx" event={"ID":"65e365e6-5912-434a-a269-85dc5254dcba","Type":"ContainerDied","Data":"4de439556b1a7f8f1a32a117874611f6001902f3c668cb6709d4d266700b3fee"} Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.038599 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerStarted","Data":"71e576ed5630817ceaae48a3176ddc4109c71e8b7b908c72170ef6f0e9a4f7e6"} Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.039646 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.072583 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5831621569999998 podStartE2EDuration="6.072559662s" podCreationTimestamp="2026-01-04 12:09:33 +0000 UTC" firstStartedPulling="2026-01-04 12:09:34.120314071 +0000 UTC m=+1272.977500790" lastFinishedPulling="2026-01-04 12:09:38.609711586 +0000 UTC m=+1277.466898295" observedRunningTime="2026-01-04 12:09:39.06946193 +0000 UTC m=+1277.926648659" watchObservedRunningTime="2026-01-04 12:09:39.072559662 +0000 UTC m=+1277.929746371" Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.263189 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"] Jan 04 12:09:39 crc kubenswrapper[4797]: W0104 12:09:39.271847 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3571eba4_ffe7_46c9_a3ba_895a81b311a9.slice/crio-f7ac70e6b68a51082afd56d60b02927d741e4680ca97aefcd3a70017764a23ac WatchSource:0}: Error finding container f7ac70e6b68a51082afd56d60b02927d741e4680ca97aefcd3a70017764a23ac: Status 404 returned error can't find the container with id f7ac70e6b68a51082afd56d60b02927d741e4680ca97aefcd3a70017764a23ac Jan 04 12:09:39 crc kubenswrapper[4797]: W0104 12:09:39.364029 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb35795cc_182a_4f7b_86ba_157d27ab5311.slice/crio-03bb463f3c86f60d5e074da00267c6d3fe22df519bf68e73d269833d18709b3c WatchSource:0}: Error finding container 03bb463f3c86f60d5e074da00267c6d3fe22df519bf68e73d269833d18709b3c: Status 404 returned error can't find the container with id 03bb463f3c86f60d5e074da00267c6d3fe22df519bf68e73d269833d18709b3c Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.367445 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"] Jan 04 12:09:39 crc kubenswrapper[4797]: I0104 12:09:39.470116 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"] Jan 04 12:09:39 crc kubenswrapper[4797]: W0104 12:09:39.479070 4797 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09fa15e9_1f16_4b21_bdcd_7e35cc7a50fb.slice/crio-f6cdcfa811c66ba4b4963d81cf42a3c55a3ff9f045dd5c22c35dcf0c8478c30a WatchSource:0}: Error finding container f6cdcfa811c66ba4b4963d81cf42a3c55a3ff9f045dd5c22c35dcf0c8478c30a: Status 404 returned error can't find the container with id f6cdcfa811c66ba4b4963d81cf42a3c55a3ff9f045dd5c22c35dcf0c8478c30a Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.049264 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerStarted","Data":"f7ac70e6b68a51082afd56d60b02927d741e4680ca97aefcd3a70017764a23ac"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.051777 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerStarted","Data":"65582f47419050670384c2fc8a984893b34e2018329211e0c162d6dd88c5704d"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.053815 4797 generic.go:334] "Generic (PLEG): container finished" podID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerID="024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee" exitCode=0 Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.054048 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" event={"ID":"b35795cc-182a-4f7b-86ba-157d27ab5311","Type":"ContainerDied","Data":"024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.054075 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" event={"ID":"b35795cc-182a-4f7b-86ba-157d27ab5311","Type":"ContainerStarted","Data":"03bb463f3c86f60d5e074da00267c6d3fe22df519bf68e73d269833d18709b3c"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.061133 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerStarted","Data":"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.061216 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.061231 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerStarted","Data":"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.061240 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerStarted","Data":"f6cdcfa811c66ba4b4963d81cf42a3c55a3ff9f045dd5c22c35dcf0c8478c30a"} Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.404856 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.426142 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" podStartSLOduration=2.426121117 podStartE2EDuration="2.426121117s" podCreationTimestamp="2026-01-04 12:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:40.098834224 +0000 UTC m=+1278.956020933" watchObservedRunningTime="2026-01-04 12:09:40.426121117 +0000 UTC m=+1279.283307826" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.554236 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.555071 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.555138 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.555201 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.555259 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc96w\" (UniqueName: \"kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.555280 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data\") pod \"65e365e6-5912-434a-a269-85dc5254dcba\" (UID: \"65e365e6-5912-434a-a269-85dc5254dcba\") " Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.557139 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.560529 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w" (OuterVolumeSpecName: "kube-api-access-vc96w") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "kube-api-access-vc96w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.561605 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts" (OuterVolumeSpecName: "scripts") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.561624 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.599043 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.608414 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data" (OuterVolumeSpecName: "config-data") pod "65e365e6-5912-434a-a269-85dc5254dcba" (UID: "65e365e6-5912-434a-a269-85dc5254dcba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659320 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659594 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659604 4797 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659613 4797 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65e365e6-5912-434a-a269-85dc5254dcba-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659622 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc96w\" (UniqueName: \"kubernetes.io/projected/65e365e6-5912-434a-a269-85dc5254dcba-kube-api-access-vc96w\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[4797]: I0104 12:09:40.659632 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65e365e6-5912-434a-a269-85dc5254dcba-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.068213 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" event={"ID":"b35795cc-182a-4f7b-86ba-157d27ab5311","Type":"ContainerStarted","Data":"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"} Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.068349 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.069639 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7dbzx" event={"ID":"65e365e6-5912-434a-a269-85dc5254dcba","Type":"ContainerDied","Data":"20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4"} Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.069668 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20b423fe12bce47f173d2ebf441de43dc0a7b39558ede7e6d7e24f967c7091e4" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.069723 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7dbzx" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.070046 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.090961 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" podStartSLOduration=3.090941985 podStartE2EDuration="3.090941985s" podCreationTimestamp="2026-01-04 12:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:41.086398966 +0000 UTC m=+1279.943585675" watchObservedRunningTime="2026-01-04 12:09:41.090941985 +0000 UTC m=+1279.948128684" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.302395 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:41 crc kubenswrapper[4797]: E0104 12:09:41.303356 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65e365e6-5912-434a-a269-85dc5254dcba" containerName="cinder-db-sync" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.303381 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="65e365e6-5912-434a-a269-85dc5254dcba" containerName="cinder-db-sync" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.303626 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="65e365e6-5912-434a-a269-85dc5254dcba" containerName="cinder-db-sync" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.306215 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.312653 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.315406 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.315592 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.315695 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wxlrf" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.315790 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.414898 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.485004 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m5dh\" (UniqueName: \"kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.485066 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: 
I0104 12:09:41.485106 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.485153 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.485173 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.485199 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.533961 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.536477 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.536602 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.548718 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.556188 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.559491 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.586953 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m5dh\" (UniqueName: \"kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.587021 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.587065 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.587114 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.587138 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.587157 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.589158 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.593834 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.594816 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: 
I0104 12:09:41.596543 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.603502 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.608853 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.610975 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m5dh\" (UniqueName: \"kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh\") pod \"cinder-scheduler-0\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.636137 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.688816 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.688888 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689039 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgrcp\" (UniqueName: \"kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689084 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689167 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689208 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts\") pod 
\"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689267 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689484 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g454\" (UniqueName: \"kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689627 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689693 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689762 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689818 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.689893 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791626 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g454\" (UniqueName: \"kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791711 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id\") pod 
\"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791745 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791765 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791785 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791817 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791840 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791867 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791904 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgrcp\" (UniqueName: \"kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791920 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791954 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.791979 4797 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.792022 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.792489 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.792806 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.793048 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.793876 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.794234 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.794766 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.795284 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.798344 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.804083 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.807405 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.888575 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.897526 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g454\" (UniqueName: \"kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454\") pod \"dnsmasq-dns-5957fff8cc-g9pj9\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.900200 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgrcp\" (UniqueName: \"kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp\") pod \"cinder-api-0\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") " pod="openstack/cinder-api-0" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.968128 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"] Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.969855 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.973601 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.973790 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 04 12:09:41 crc kubenswrapper[4797]: I0104 12:09:41.991121 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"] Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.054535 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.060683 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100290 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100399 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100462 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100499 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100544 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28f2w\" (UniqueName: \"kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100568 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.100609 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.192443 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.203521 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.203933 4797 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.204085 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.204148 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28f2w\" (UniqueName: \"kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.204182 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.204298 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.204398 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.209661 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.211262 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.214073 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.215756 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.228246 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.228809 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.229382 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28f2w\" (UniqueName: \"kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w\") pod \"barbican-api-7c675d9b9b-9fg4r\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.292721 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.661911 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.793601 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"]
Jan 04 12:09:42 crc kubenswrapper[4797]: I0104 12:09:42.934736 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"]
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.113033 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerStarted","Data":"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.113092 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerStarted","Data":"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.116313 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerStarted","Data":"044959df9742f5a9eefddc330a19aad94ea26b7b908cfb6fc2f7ac6006d93f22"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.138154 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" podStartSLOduration=2.760451248 podStartE2EDuration="5.138134739s" podCreationTimestamp="2026-01-04 12:09:38 +0000 UTC" firstStartedPulling="2026-01-04 12:09:39.03529083 +0000 UTC m=+1277.892477539" lastFinishedPulling="2026-01-04 12:09:41.412974321 +0000 UTC m=+1280.270161030" observedRunningTime="2026-01-04 12:09:43.13021173 +0000 UTC m=+1281.987398449" watchObservedRunningTime="2026-01-04 12:09:43.138134739 +0000 UTC m=+1281.995321448"
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.169853 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerStarted","Data":"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.169912 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerStarted","Data":"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.180343 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerStarted","Data":"c7f6ed60cff4dde820294d39b579f0af4aa3677e17cf9b6498cb505f06798f06"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.200432 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="dnsmasq-dns" containerID="cri-o://a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb" gracePeriod=10
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.200903 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" event={"ID":"1454e128-5003-41cb-96b6-25cd199aea43","Type":"ContainerStarted","Data":"9356474f2393050a3b4284a63e64ce43ef9bcfb07ebfcc5a49320998affc2b06"}
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.251355 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6bfdff7977-xqk4d" podStartSLOduration=3.117914178 podStartE2EDuration="5.251336752s" podCreationTimestamp="2026-01-04 12:09:38 +0000 UTC" firstStartedPulling="2026-01-04 12:09:39.279074084 +0000 UTC m=+1278.136260793" lastFinishedPulling="2026-01-04 12:09:41.412496658 +0000 UTC m=+1280.269683367" observedRunningTime="2026-01-04 12:09:43.194800972 +0000 UTC m=+1282.051987681" watchObservedRunningTime="2026-01-04 12:09:43.251336752 +0000 UTC m=+1282.108523461"
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.829389 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74477f67b9-khz9p"
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.968277 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.968794 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.968886 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.969080 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.969168 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7slc\" (UniqueName: \"kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.969217 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0\") pod \"b35795cc-182a-4f7b-86ba-157d27ab5311\" (UID: \"b35795cc-182a-4f7b-86ba-157d27ab5311\") "
Jan 04 12:09:43 crc kubenswrapper[4797]: I0104 12:09:43.991130 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc" (OuterVolumeSpecName: "kube-api-access-b7slc") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "kube-api-access-b7slc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.072337 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7slc\" (UniqueName: \"kubernetes.io/projected/b35795cc-182a-4f7b-86ba-157d27ab5311-kube-api-access-b7slc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.116815 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.122629 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.144443 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.157474 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.168609 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config" (OuterVolumeSpecName: "config") pod "b35795cc-182a-4f7b-86ba-157d27ab5311" (UID: "b35795cc-182a-4f7b-86ba-157d27ab5311"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.176944 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.177316 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.177339 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.177351 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.177363 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b35795cc-182a-4f7b-86ba-157d27ab5311-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.224111 4797 generic.go:334] "Generic (PLEG): container finished" podID="1454e128-5003-41cb-96b6-25cd199aea43" containerID="ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851" exitCode=0
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.224175 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" event={"ID":"1454e128-5003-41cb-96b6-25cd199aea43","Type":"ContainerDied","Data":"ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.224205 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" event={"ID":"1454e128-5003-41cb-96b6-25cd199aea43","Type":"ContainerStarted","Data":"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.225275 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.231881 4797 generic.go:334] "Generic (PLEG): container finished" podID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerID="a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb" exitCode=0
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.231963 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" event={"ID":"b35795cc-182a-4f7b-86ba-157d27ab5311","Type":"ContainerDied","Data":"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.232004 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74477f67b9-khz9p" event={"ID":"b35795cc-182a-4f7b-86ba-157d27ab5311","Type":"ContainerDied","Data":"03bb463f3c86f60d5e074da00267c6d3fe22df519bf68e73d269833d18709b3c"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.232021 4797 scope.go:117] "RemoveContainer" containerID="a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.232133 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74477f67b9-khz9p"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.247026 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" podStartSLOduration=3.247005257 podStartE2EDuration="3.247005257s" podCreationTimestamp="2026-01-04 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:44.243369941 +0000 UTC m=+1283.100556650" watchObservedRunningTime="2026-01-04 12:09:44.247005257 +0000 UTC m=+1283.104191966"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.252764 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerStarted","Data":"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.257605 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerStarted","Data":"90b02fc9419aacb467bc917b82b3cf36fa359aa8f7e3da9a5e9dd0d2acd2ff8b"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.257663 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.257725 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.257735 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerStarted","Data":"3e3a15f869a29fb29c3c3b83c8c750fb50a7d3d3675123fde9492287428afb82"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.257748 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerStarted","Data":"7eea30a9665ea6482ee0ce5f14ac5320fae71817557119980ebba26d32335361"}
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.288596 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"]
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.295708 4797 scope.go:117] "RemoveContainer" containerID="024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.303038 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74477f67b9-khz9p"]
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.315194 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7c675d9b9b-9fg4r" podStartSLOduration=3.315175583 podStartE2EDuration="3.315175583s" podCreationTimestamp="2026-01-04 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:44.290560934 +0000 UTC m=+1283.147747643" watchObservedRunningTime="2026-01-04 12:09:44.315175583 +0000 UTC m=+1283.172362292"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.357857 4797 scope.go:117] "RemoveContainer" containerID="a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"
Jan 04 12:09:44 crc kubenswrapper[4797]: E0104 12:09:44.358268 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb\": container with ID starting with a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb not found: ID does not exist" containerID="a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.358297 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb"} err="failed to get container status \"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb\": rpc error: code = NotFound desc = could not find container \"a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb\": container with ID starting with a2d20f287e756dde308c123c3854fe5d2f769af17ed3e8f07dec6421db64cecb not found: ID does not exist"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.358316 4797 scope.go:117] "RemoveContainer" containerID="024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee"
Jan 04 12:09:44 crc kubenswrapper[4797]: E0104 12:09:44.358480 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee\": container with ID starting with 024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee not found: ID does not exist" containerID="024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.358499 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee"} err="failed to get container status \"024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee\": rpc error: code = NotFound desc = could not find container \"024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee\": container with ID starting with 024ae4a5c9de1d29b10f4ebeb31f0546184b7d8dbe576764d2ecb3b3091191ee not found: ID does not exist"
Jan 04 12:09:44 crc kubenswrapper[4797]: I0104 12:09:44.944249 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.275910 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerStarted","Data":"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"}
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.277103 4797 generic.go:334] "Generic (PLEG): container finished" podID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" containerID="cda8ef357d1d23977cac785ecf306bc2c2c9ad45f4365b9a32302df789764ffb" exitCode=0
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.277144 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2hjqq" event={"ID":"e64d6948-1c4e-4db6-b739-24b2aba46fd3","Type":"ContainerDied","Data":"cda8ef357d1d23977cac785ecf306bc2c2c9ad45f4365b9a32302df789764ffb"}
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.279310 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerStarted","Data":"faa10c4e936f21cacd7a55a8b34ae899bc8568cb03f8bbf7c8fdacc764b81f46"}
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.279338 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerStarted","Data":"52c365e971560c89bb0a88661d1e1a6b7673e0f78e38552e4b757f799cae7f5c"}
Jan 04 12:09:45 crc kubenswrapper[4797]: I0104 12:09:45.487028 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" path="/var/lib/kubelet/pods/b35795cc-182a-4f7b-86ba-157d27ab5311/volumes"
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.289478 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api-log" containerID="cri-o://688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5" gracePeriod=30
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.289511 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api" containerID="cri-o://ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4" gracePeriod=30
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.320353 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.320336409 podStartE2EDuration="5.320336409s" podCreationTimestamp="2026-01-04 12:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:46.314189367 +0000 UTC m=+1285.171376076" watchObservedRunningTime="2026-01-04 12:09:46.320336409 +0000 UTC m=+1285.177523118"
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.347791 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.317531296 podStartE2EDuration="5.347767712s" podCreationTimestamp="2026-01-04 12:09:41 +0000 UTC" firstStartedPulling="2026-01-04 12:09:42.228942142 +0000 UTC m=+1281.086128861" lastFinishedPulling="2026-01-04 12:09:43.259178568 +0000 UTC m=+1282.116365277" observedRunningTime="2026-01-04 12:09:46.34428008 +0000 UTC m=+1285.201466789" watchObservedRunningTime="2026-01-04 12:09:46.347767712 +0000 UTC m=+1285.204954431"
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.643218 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.826200 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2hjqq"
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.930933 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data\") pod \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") "
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.931027 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwz2n\" (UniqueName: \"kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n\") pod \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") "
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.931066 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data\") pod \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") "
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.931143 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle\") pod \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\" (UID: \"e64d6948-1c4e-4db6-b739-24b2aba46fd3\") "
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.936429 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n" (OuterVolumeSpecName: "kube-api-access-pwz2n") pod "e64d6948-1c4e-4db6-b739-24b2aba46fd3" (UID: "e64d6948-1c4e-4db6-b739-24b2aba46fd3"). InnerVolumeSpecName "kube-api-access-pwz2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.942196 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e64d6948-1c4e-4db6-b739-24b2aba46fd3" (UID: "e64d6948-1c4e-4db6-b739-24b2aba46fd3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.960742 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e64d6948-1c4e-4db6-b739-24b2aba46fd3" (UID: "e64d6948-1c4e-4db6-b739-24b2aba46fd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:46 crc kubenswrapper[4797]: I0104 12:09:46.987157 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data" (OuterVolumeSpecName: "config-data") pod "e64d6948-1c4e-4db6-b739-24b2aba46fd3" (UID: "e64d6948-1c4e-4db6-b739-24b2aba46fd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.001903 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.040079 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.040149 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwz2n\" (UniqueName: \"kubernetes.io/projected/e64d6948-1c4e-4db6-b739-24b2aba46fd3-kube-api-access-pwz2n\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.040177 4797 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.040191 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e64d6948-1c4e-4db6-b739-24b2aba46fd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141049 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141103 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141130 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141180 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141239 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgrcp\" (UniqueName: \"kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141353 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141388 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts\") pod \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\" (UID: \"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774\") "
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141229 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141870 4797 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.141905 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs" (OuterVolumeSpecName: "logs") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.145398 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.145417 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp" (OuterVolumeSpecName: "kube-api-access-qgrcp") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "kube-api-access-qgrcp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.147489 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts" (OuterVolumeSpecName: "scripts") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.172215 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.186952 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data" (OuterVolumeSpecName: "config-data") pod "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" (UID: "45b175ec-30bd-4a8e-adb5-a3f1bbbd9774"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243700 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243740 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243751 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243763 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgrcp\" (UniqueName: \"kubernetes.io/projected/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-kube-api-access-qgrcp\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243773 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.243781 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301483 4797 generic.go:334] "Generic (PLEG): container finished" podID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerID="ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4" exitCode=0
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301513 4797 generic.go:334] "Generic (PLEG): container finished" podID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerID="688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5" exitCode=143
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301563 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerDied","Data":"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"}
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301591 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerDied","Data":"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"}
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301602 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"45b175ec-30bd-4a8e-adb5-a3f1bbbd9774","Type":"ContainerDied","Data":"044959df9742f5a9eefddc330a19aad94ea26b7b908cfb6fc2f7ac6006d93f22"}
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301607 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.301617 4797 scope.go:117] "RemoveContainer" containerID="ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.304677 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2hjqq" event={"ID":"e64d6948-1c4e-4db6-b739-24b2aba46fd3","Type":"ContainerDied","Data":"f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0"}
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.304706 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f059e09ba0b0c946353056ae24d1ef210c2933ceaa0f5a97b141ddc848e44ad0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.304684 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2hjqq"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.325840 4797 scope.go:117] "RemoveContainer" containerID="688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.359383 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.361060 4797 scope.go:117] "RemoveContainer" containerID="ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.361559 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4\": container with ID starting with ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4 not found: ID does not exist" containerID="ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.361667 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"} err="failed to get container status \"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4\": rpc error: code = NotFound desc = could not find container \"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4\": container with ID starting with ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4 not found: ID does not exist"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.361761 4797 scope.go:117] "RemoveContainer" containerID="688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.362493 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5\": container with ID starting with 688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5 not found: ID does not exist" containerID="688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.362590 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"} err="failed to get container status \"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5\": rpc error: code = NotFound desc = could not find container \"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5\": container with ID starting with 688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5 not found: ID does not exist"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.362682 4797 scope.go:117] "RemoveContainer" containerID="ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.363042 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4"} err="failed to get container status \"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4\": rpc error: code = NotFound desc = could not find container \"ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4\": container with ID starting with ebc9a263739775dd62bb0972d8e0641049d7b5425465a569e31780bc1366f7e4 not found: ID does not exist"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.363107 4797 scope.go:117] "RemoveContainer" containerID="688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.363483 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5"} err="failed to get container status \"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5\": rpc error: code = NotFound desc = could not find container \"688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5\": container with ID starting with 688b6b84eda52145674b41959595e77338fc1c1254a97b5631076e6a3c1f0bd5 not found: ID does not exist"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.365235 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381055 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.381368 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="init"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381383 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="init"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.381407 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api-log"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381414 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api-log"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.381424 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" containerName="glance-db-sync"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381430 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" containerName="glance-db-sync"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.381442 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381448 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api"
Jan 04 12:09:47 crc kubenswrapper[4797]: E0104 12:09:47.381460 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381465 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381622 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api-log"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381632 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" containerName="glance-db-sync"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381640 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b35795cc-182a-4f7b-86ba-157d27ab5311" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.381661 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" containerName="cinder-api"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.382474 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.387738 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.387744 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.387745 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.400823 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.484146 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45b175ec-30bd-4a8e-adb5-a3f1bbbd9774" path="/var/lib/kubelet/pods/45b175ec-30bd-4a8e-adb5-a3f1bbbd9774/volumes"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.551751 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.551982 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552084 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552227 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552334 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552455 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552543 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552635 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.552707 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl7lt\" (UniqueName: \"kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.654331 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.654586 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.654659 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.655124 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.655659 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.655709 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.655822 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.655886 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.656271 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.656327 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl7lt\" (UniqueName: \"kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.656096 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.664940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.665432 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.667943 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.668799 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.669928 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.676529 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl7lt\" (UniqueName: \"kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.695695 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.705128 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.714724 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.715042 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="dnsmasq-dns" containerID="cri-o://96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753" gracePeriod=10
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.760376 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.762209 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.789952 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"]
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869276 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpnt2\" (UniqueName: \"kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869402 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869460 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869510 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869534 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.869596 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971334 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpnt2\" (UniqueName: \"kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971628 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971663 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971693 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971710 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.971748 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.972937 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.973672 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.974178 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.974745 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.974816 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:47 crc kubenswrapper[4797]: I0104 12:09:47.991880 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpnt2\" (UniqueName: \"kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2\") pod \"dnsmasq-dns-6b4f5fc4f-wtcrw\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.175469 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-569f46ffb4-b7hph"
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.215619 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.300677 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.301585 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9"
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.355225 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerStarted","Data":"2bef0afc0189b44a53c78411bbab4568c071b7676673fcac3d2cb05957daa757"}
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.360114 4797 generic.go:334] "Generic (PLEG): container finished" podID="1454e128-5003-41cb-96b6-25cd199aea43" containerID="96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753" exitCode=0
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.360149 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" event={"ID":"1454e128-5003-41cb-96b6-25cd199aea43","Type":"ContainerDied","Data":"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753"}
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.360170 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" event={"ID":"1454e128-5003-41cb-96b6-25cd199aea43","Type":"ContainerDied","Data":"9356474f2393050a3b4284a63e64ce43ef9bcfb07ebfcc5a49320998affc2b06"}
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.360187 4797 scope.go:117] "RemoveContainer" containerID="96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753"
Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.360318 4797 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-5957fff8cc-g9pj9" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.472215 4797 scope.go:117] "RemoveContainer" containerID="ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.485380 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.485496 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.485515 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.485529 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.487588 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.487676 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g454\" (UniqueName: \"kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454\") pod \"1454e128-5003-41cb-96b6-25cd199aea43\" (UID: \"1454e128-5003-41cb-96b6-25cd199aea43\") " Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.497896 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454" (OuterVolumeSpecName: "kube-api-access-7g454") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "kube-api-access-7g454". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.507129 4797 scope.go:117] "RemoveContainer" containerID="96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753" Jan 04 12:09:48 crc kubenswrapper[4797]: E0104 12:09:48.523144 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753\": container with ID starting with 96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753 not found: ID does not exist" containerID="96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.523201 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753"} err="failed to get container status \"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753\": rpc error: code = NotFound desc = could not find container \"96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753\": container with ID starting with 96f17485acd0a5a8164666a658649091007039426d0ae7d542b5f6ce043fc753 not found: ID does not exist" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.523231 4797 scope.go:117] "RemoveContainer" containerID="ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851" Jan 04 12:09:48 crc kubenswrapper[4797]: E0104 12:09:48.527062 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851\": container with ID starting with ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851 not found: ID does not exist" containerID="ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.527088 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851"} err="failed to get container status \"ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851\": rpc error: code = NotFound desc = could not find container \"ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851\": container with ID starting with ec373afb2d1c4e01a240f10bf0df5762e525651048197151253988ad7fc9e851 not found: ID does not exist" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.592504 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g454\" (UniqueName: \"kubernetes.io/projected/1454e128-5003-41cb-96b6-25cd199aea43-kube-api-access-7g454\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.595282 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.630524 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.650305 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.661153 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config" (OuterVolumeSpecName: "config") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.663835 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1454e128-5003-41cb-96b6-25cd199aea43" (UID: "1454e128-5003-41cb-96b6-25cd199aea43"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.672817 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:48 crc kubenswrapper[4797]: E0104 12:09:48.673197 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="init" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.673212 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="init" Jan 04 12:09:48 crc kubenswrapper[4797]: E0104 12:09:48.673229 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="dnsmasq-dns" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.673236 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="dnsmasq-dns" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.673416 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1454e128-5003-41cb-96b6-25cd199aea43" containerName="dnsmasq-dns" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.675813 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.678022 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.678379 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.678509 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-plvd7" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.709690 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.722156 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.722183 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.722193 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.722202 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.722210 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1454e128-5003-41cb-96b6-25cd199aea43-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.825793 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc6ch\" (UniqueName: \"kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826077 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826103 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826122 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826165 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826183 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.826201 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.878677 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.880038 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.893774 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.898208 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927247 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc6ch\" (UniqueName: \"kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927303 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927325 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927342 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927384 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927404 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.927423 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.929512 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.929770 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.936311 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.936340 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"] Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.955752 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.958545 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.960629 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:48 crc kubenswrapper[4797]: I0104 12:09:48.973230 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc6ch\" (UniqueName: \"kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.020108 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") " pod="openstack/glance-default-external-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030466 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssb7c\" (UniqueName: \"kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030541 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030563 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030580 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030596 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030620 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.030645 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.045392 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.062283 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"] Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.081416 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5957fff8cc-g9pj9"] Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.137999 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssb7c\" (UniqueName: \"kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138089 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138114 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138134 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138151 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138174 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.138198 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.139714 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume 
\"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.140538 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.140748 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.145321 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.157352 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.168669 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssb7c\" (UniqueName: \"kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.184669 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.217072 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.262386 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.395071 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" event={"ID":"061e64ec-34bd-4c16-8afd-8b03537455ad","Type":"ContainerStarted","Data":"e277dd5c980779b08f403cc97145ecf0ebfb0fc217bb1a7232a9a453e0ddfbab"} Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.526731 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1454e128-5003-41cb-96b6-25cd199aea43" path="/var/lib/kubelet/pods/1454e128-5003-41cb-96b6-25cd199aea43/volumes" Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.708206 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:49 crc kubenswrapper[4797]: I0104 12:09:49.971417 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.452131 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerStarted","Data":"17a586288c4e9f3a6a9be748b2c96e63c91beaf6d5a6b148be035ae82289cceb"} Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.460781 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerStarted","Data":"ef3423ef3ff9696e243a312be9b07a6d3e37d5f45565ec449db24025d54bb7b6"} Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.465272 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerStarted","Data":"9a78a250903e07d598e88432420c861a2b78d03523ca1f130ae64e7cae2c30a0"} Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.468471 4797 generic.go:334] "Generic (PLEG): container finished" podID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerID="2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5" exitCode=0 Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.468503 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" event={"ID":"061e64ec-34bd-4c16-8afd-8b03537455ad","Type":"ContainerDied","Data":"2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5"} Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.771562 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.840789 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:50 crc kubenswrapper[4797]: I0104 12:09:50.957404 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.012830 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.347391 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.402331 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:09:51 crc 
kubenswrapper[4797]: I0104 12:09:51.554331 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" event={"ID":"061e64ec-34bd-4c16-8afd-8b03537455ad","Type":"ContainerStarted","Data":"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88"} Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.554382 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-56b94d8bbf-ng2pk" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.554395 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.577440 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerStarted","Data":"b8fa19a269a0b8d255a93c63ca49aa2833622d24c07c7e911c9c0bc1714e3354"} Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.586464 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerStarted","Data":"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"} Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.597525 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" podStartSLOduration=4.597512921 podStartE2EDuration="4.597512921s" podCreationTimestamp="2026-01-04 12:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:51.593898466 +0000 UTC m=+1290.451085175" watchObservedRunningTime="2026-01-04 12:09:51.597512921 +0000 UTC m=+1290.454699630" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.608639 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerStarted","Data":"82da6920fae7841a168988c98103f1142bc6deaa632d2c919def4424335d4556"} Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.608803 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.713889 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.714170 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-569f46ffb4-b7hph" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-api" containerID="cri-o://39cd871a2cdd33c13df6a2d820e492302dd31e6705fb1d98b69638efa2161416" gracePeriod=30 Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.714596 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-569f46ffb4-b7hph" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-httpd" containerID="cri-o://b6e4f4c9fc24c93108e20d30e67e1eacd46dba1b62cf51fbfafb10e51011ab52" gracePeriod=30 Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.714888 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.714850353 podStartE2EDuration="4.714850353s" podCreationTimestamp="2026-01-04 12:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:51.682624584 +0000 UTC m=+1290.539811303" watchObservedRunningTime="2026-01-04 12:09:51.714850353 +0000 UTC m=+1290.572037062" Jan 04 12:09:51 crc kubenswrapper[4797]: I0104 12:09:51.941918 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.023763 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.317163 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.619731 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerStarted","Data":"2bc4aae7b3ce0614d2e8a17e60775eb92800d89a4ce495834ac50f343d8b05b7"} Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.619828 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-log" containerID="cri-o://b8fa19a269a0b8d255a93c63ca49aa2833622d24c07c7e911c9c0bc1714e3354" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.619912 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-httpd" containerID="cri-o://2bc4aae7b3ce0614d2e8a17e60775eb92800d89a4ce495834ac50f343d8b05b7" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.623570 4797 generic.go:334] "Generic (PLEG): container finished" podID="298c44d2-329d-4207-b334-af984980c565" containerID="b6e4f4c9fc24c93108e20d30e67e1eacd46dba1b62cf51fbfafb10e51011ab52" exitCode=0 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.623632 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerDied","Data":"b6e4f4c9fc24c93108e20d30e67e1eacd46dba1b62cf51fbfafb10e51011ab52"} Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.627840 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-log" containerID="cri-o://07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.628171 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerStarted","Data":"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"} Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.628361 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="cinder-scheduler" containerID="cri-o://52c365e971560c89bb0a88661d1e1a6b7673e0f78e38552e4b757f799cae7f5c" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.628931 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" 
containerName="glance-httpd" containerID="cri-o://ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.629100 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="probe" containerID="cri-o://faa10c4e936f21cacd7a55a8b34ae899bc8568cb03f8bbf7c8fdacc764b81f46" gracePeriod=30 Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.649084 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.649068799 podStartE2EDuration="5.649068799s" podCreationTimestamp="2026-01-04 12:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:52.638137901 +0000 UTC m=+1291.495324630" watchObservedRunningTime="2026-01-04 12:09:52.649068799 +0000 UTC m=+1291.506255508" Jan 04 12:09:52 crc kubenswrapper[4797]: I0104 12:09:52.672216 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.672200549 podStartE2EDuration="5.672200549s" podCreationTimestamp="2026-01-04 12:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:52.670295019 +0000 UTC m=+1291.527481728" watchObservedRunningTime="2026-01-04 12:09:52.672200549 +0000 UTC m=+1291.529387248" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.377328 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493599 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493656 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493705 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493796 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493841 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 
12:09:53.493902 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.493933 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssb7c\" (UniqueName: \"kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c\") pod \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\" (UID: \"d25fa4b3-4cb6-44bc-9136-7a27a6462335\") " Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.495071 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.495090 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs" (OuterVolumeSpecName: "logs") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.500162 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts" (OuterVolumeSpecName: "scripts") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.501150 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c" (OuterVolumeSpecName: "kube-api-access-ssb7c") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "kube-api-access-ssb7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.502906 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.554208 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.584663 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data" (OuterVolumeSpecName: "config-data") pod "d25fa4b3-4cb6-44bc-9136-7a27a6462335" (UID: "d25fa4b3-4cb6-44bc-9136-7a27a6462335"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.596496 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.597802 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.597910 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.598162 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d25fa4b3-4cb6-44bc-9136-7a27a6462335-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.598257 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.598371 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d25fa4b3-4cb6-44bc-9136-7a27a6462335-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.598457 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssb7c\" (UniqueName: \"kubernetes.io/projected/d25fa4b3-4cb6-44bc-9136-7a27a6462335-kube-api-access-ssb7c\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.626491 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.641809 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerID="faa10c4e936f21cacd7a55a8b34ae899bc8568cb03f8bbf7c8fdacc764b81f46" exitCode=0
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.641903 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerDied","Data":"faa10c4e936f21cacd7a55a8b34ae899bc8568cb03f8bbf7c8fdacc764b81f46"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.652853 4797 generic.go:334] "Generic (PLEG): container finished" podID="fc671b71-676e-42ea-b027-5c64676f8192" containerID="2bc4aae7b3ce0614d2e8a17e60775eb92800d89a4ce495834ac50f343d8b05b7" exitCode=0
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.652904 4797 generic.go:334] "Generic (PLEG): container finished" podID="fc671b71-676e-42ea-b027-5c64676f8192" containerID="b8fa19a269a0b8d255a93c63ca49aa2833622d24c07c7e911c9c0bc1714e3354" exitCode=143
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.652956 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerDied","Data":"2bc4aae7b3ce0614d2e8a17e60775eb92800d89a4ce495834ac50f343d8b05b7"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.653014 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerDied","Data":"b8fa19a269a0b8d255a93c63ca49aa2833622d24c07c7e911c9c0bc1714e3354"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.654884 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656156 4797 generic.go:334] "Generic (PLEG): container finished" podID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerID="ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b" exitCode=0
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656185 4797 generic.go:334] "Generic (PLEG): container finished" podID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerID="07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c" exitCode=143
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656207 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerDied","Data":"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656234 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerDied","Data":"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656234 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656258 4797 scope.go:117] "RemoveContainer" containerID="ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.656246 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d25fa4b3-4cb6-44bc-9136-7a27a6462335","Type":"ContainerDied","Data":"ef3423ef3ff9696e243a312be9b07a6d3e37d5f45565ec449db24025d54bb7b6"}
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.694631 4797 scope.go:117] "RemoveContainer" containerID="07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.711711 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.723210 4797 scope.go:117] "RemoveContainer" containerID="ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.746225 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.766733 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b\": container with ID starting with ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b not found: ID does not exist" containerID="ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.766887 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"} err="failed to get container status \"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b\": rpc error: code = NotFound desc = could not find container \"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b\": container with ID starting with ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b not found: ID does not exist"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.766918 4797 scope.go:117] "RemoveContainer" containerID="07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.774100 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c\": container with ID starting with 07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c not found: ID does not exist" containerID="07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.774152 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"} err="failed to get container status \"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c\": rpc error: code = NotFound desc = could not find container \"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c\": container with ID starting with 07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c not found: ID does not exist"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.774179 4797 scope.go:117] "RemoveContainer" containerID="ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.775747 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b"} err="failed to get container status \"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b\": rpc error: code = NotFound desc = could not find container \"ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b\": container with ID starting with ef77df7da05b574575d4a1c8e98ec285f83483631f0b90cc225d4d9d2f214e1b not found: ID does not exist"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.775769 4797 scope.go:117] "RemoveContainer" containerID="07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.779626 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c"} err="failed to get container status \"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c\": rpc error: code = NotFound desc = could not find container \"07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c\": container with ID starting with 07e9aac7eeadb3bf6ba8d9f903521929f2abfae34b97bb24f443c664f69a3f6c not found: ID does not exist"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.788789 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813421 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813516 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc6ch\" (UniqueName: \"kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813548 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813889 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813925 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.813967 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.814062 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run\") pod \"fc671b71-676e-42ea-b027-5c64676f8192\" (UID: \"fc671b71-676e-42ea-b027-5c64676f8192\") "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.814748 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs" (OuterVolumeSpecName: "logs") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.815026 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.817066 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.817472 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.817692 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.817735 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.817744 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.817755 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.817781 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: E0104 12:09:53.817803 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.817811 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.818107 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.818126 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.818140 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-log"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.818171 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc671b71-676e-42ea-b027-5c64676f8192" containerName="glance-httpd"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.819424 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.820394 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.820475 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts" (OuterVolumeSpecName: "scripts") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.823533 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.828259 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.832162 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.835690 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch" (OuterVolumeSpecName: "kube-api-access-gc6ch") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "kube-api-access-gc6ch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.854171 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.890722 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data" (OuterVolumeSpecName: "config-data") pod "fc671b71-676e-42ea-b027-5c64676f8192" (UID: "fc671b71-676e-42ea-b027-5c64676f8192"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.916374 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.916530 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.916669 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.916891 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917074 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917203 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917434 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtqrg\" (UniqueName: \"kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917532 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917629 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917660 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917679 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc6ch\" (UniqueName: \"kubernetes.io/projected/fc671b71-676e-42ea-b027-5c64676f8192-kube-api-access-gc6ch\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917698 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917733 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917752 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc671b71-676e-42ea-b027-5c64676f8192-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.917770 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc671b71-676e-42ea-b027-5c64676f8192-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:53 crc kubenswrapper[4797]: I0104 12:09:53.955872 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019420 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019584 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtqrg\" (UniqueName: \"kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019617 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019638 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019657 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019680 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019724 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019762 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.019808 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.020460 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.020736 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.020817 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.024838 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.025232 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.043700 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.044091 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.047735 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtqrg\" (UniqueName: \"kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.064316 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.253570 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.339267 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.576656 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.660904 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.661145 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api-log" containerID="cri-o://24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c" gracePeriod=30
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.661334 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api" containerID="cri-o://b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09" gracePeriod=30
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.694277 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.694278 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fc671b71-676e-42ea-b027-5c64676f8192","Type":"ContainerDied","Data":"17a586288c4e9f3a6a9be748b2c96e63c91beaf6d5a6b148be035ae82289cceb"}
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.694541 4797 scope.go:117] "RemoveContainer" containerID="2bc4aae7b3ce0614d2e8a17e60775eb92800d89a4ce495834ac50f343d8b05b7"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.743622 4797 scope.go:117] "RemoveContainer" containerID="b8fa19a269a0b8d255a93c63ca49aa2833622d24c07c7e911c9c0bc1714e3354"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.744349 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.776150 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.814513 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.816178 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.819304 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.819749 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.834652 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.913343 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953692 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6lmt\" (UniqueName: \"kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953749 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953787 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953817 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953847 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953863 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.953905 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:54 crc kubenswrapper[4797]: I0104 12:09:54.954154 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055495 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055537 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055606 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6lmt\" (UniqueName: \"kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055631 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055660 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055688 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055716 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.055730 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.057002 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.057401 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.057754 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.062978 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.063781 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.094093 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.115952 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6lmt\" (UniqueName: \"kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.118046 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.145222 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.162659 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.487740 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d25fa4b3-4cb6-44bc-9136-7a27a6462335" path="/var/lib/kubelet/pods/d25fa4b3-4cb6-44bc-9136-7a27a6462335/volumes"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.488894 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc671b71-676e-42ea-b027-5c64676f8192" path="/var/lib/kubelet/pods/fc671b71-676e-42ea-b027-5c64676f8192/volumes"
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.707427 4797 generic.go:334] "Generic (PLEG): container finished" podID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerID="24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c" exitCode=143
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.707542 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerDied","Data":"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c"}
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.711328 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerID="52c365e971560c89bb0a88661d1e1a6b7673e0f78e38552e4b757f799cae7f5c" exitCode=0
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.711372 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerDied","Data":"52c365e971560c89bb0a88661d1e1a6b7673e0f78e38552e4b757f799cae7f5c"}
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.712575 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerStarted","Data":"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15"}
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.712600 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerStarted","Data":"5c7f41dd8f931cafa043b2d1e43238451337b0d677e6f1371f4a14bba2e547b8"}
Jan 04 12:09:55 crc kubenswrapper[4797]: I0104 12:09:55.873248 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.016503 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.017565 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.021601 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.021937 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.026077 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zt9ql"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.039192 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.120616 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.120681 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.120700 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.120852 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb8rw\" (UniqueName: \"kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.212598 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.222977 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.223081 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.223102 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.223138 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb8rw\" (UniqueName: \"kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.224320 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.231838 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.233939 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.246205 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb8rw\" (UniqueName: \"kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw\") pod \"openstackclient\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324680 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324746 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324776 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324893 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m5dh\" (UniqueName: \"kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324948 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324937 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.324980 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data\") pod \"ef86c719-3bd5-4c01-9038-5fe464f0058d\" (UID: \"ef86c719-3bd5-4c01-9038-5fe464f0058d\") "
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.326009 4797 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef86c719-3bd5-4c01-9038-5fe464f0058d-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.333324 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts" (OuterVolumeSpecName: "scripts") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.341161 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.342400 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh" (OuterVolumeSpecName: "kube-api-access-4m5dh") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "kube-api-access-4m5dh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.359574 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.404927 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.427905 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.427937 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.427949 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m5dh\" (UniqueName: \"kubernetes.io/projected/ef86c719-3bd5-4c01-9038-5fe464f0058d-kube-api-access-4m5dh\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.427960 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.506453 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data" (OuterVolumeSpecName: "config-data") pod "ef86c719-3bd5-4c01-9038-5fe464f0058d" (UID: "ef86c719-3bd5-4c01-9038-5fe464f0058d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.548815 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef86c719-3bd5-4c01-9038-5fe464f0058d-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.725034 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerStarted","Data":"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989"}
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.725514 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerStarted","Data":"725ee05498b3384ec6889ea6afdfbcabcb8c4c893d3874fd7cb34a0be4152a5b"}
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.728005 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ef86c719-3bd5-4c01-9038-5fe464f0058d","Type":"ContainerDied","Data":"c7f6ed60cff4dde820294d39b579f0af4aa3677e17cf9b6498cb505f06798f06"}
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.728048 4797 scope.go:117] "RemoveContainer" containerID="faa10c4e936f21cacd7a55a8b34ae899bc8568cb03f8bbf7c8fdacc764b81f46"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.728199 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.763578 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerStarted","Data":"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46"}
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.796556 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"]
Jan 04 12:09:56 crc kubenswrapper[4797]: E0104 12:09:56.796932 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="probe"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.796945 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="probe"
Jan 04 12:09:56 crc kubenswrapper[4797]: E0104 12:09:56.796976 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="cinder-scheduler"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.797006 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="cinder-scheduler"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.797172 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="cinder-scheduler"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.797191 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" containerName="probe"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.798073 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.801875 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.802132 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.802285 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.808780 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.808756006 podStartE2EDuration="3.808756006s" podCreationTimestamp="2026-01-04 12:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:56.793633758 +0000 UTC m=+1295.650820467" watchObservedRunningTime="2026-01-04 12:09:56.808756006 +0000 UTC m=+1295.665942715"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.822652 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.828199 4797 scope.go:117] "RemoveContainer" containerID="52c365e971560c89bb0a88661d1e1a6b7673e0f78e38552e4b757f799cae7f5c"
Jan 04 12:09:56 crc kubenswrapper[4797]: W0104 12:09:56.906710 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea5de376_7b66_40d0_8565_c8a34961540b.slice/crio-12058c4a0906fc2a08ff65ffb16bd6d5e11721e2d8738c74b84b3e1db759452a WatchSource:0}: Error finding container 12058c4a0906fc2a08ff65ffb16bd6d5e11721e2d8738c74b84b3e1db759452a: Status 404 returned error can't find the container with id 12058c4a0906fc2a08ff65ffb16bd6d5e11721e2d8738c74b84b3e1db759452a
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.924640 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.942835 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.956663 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.962899 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgc5d\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.963114 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.963178 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.963262 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.963304 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.966881 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.966962 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.966978 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4"
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.973295 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.974675 4797 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.978518 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 04 12:09:56 crc kubenswrapper[4797]: I0104 12:09:56.989064 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075382 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgc5d\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075666 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075694 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075736 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075767 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075796 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075813 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.075831 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.076695 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.076907 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.088840 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.093047 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.099234 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.099960 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.101971 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgc5d\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.103322 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift\") pod \"swift-proxy-5746959b69-brph4\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.176355 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177284 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177354 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177388 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4bht\" (UniqueName: \"kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177432 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177653 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.177760 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279523 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4bht\" (UniqueName: \"kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279575 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279645 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 
12:09:57.279706 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279739 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279763 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.279840 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.288803 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.289845 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.290411 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.301438 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.301516 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4bht\" (UniqueName: \"kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht\") pod \"cinder-scheduler-0\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.418280 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.504322 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef86c719-3bd5-4c01-9038-5fe464f0058d" path="/var/lib/kubelet/pods/ef86c719-3bd5-4c01-9038-5fe464f0058d/volumes" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.725800 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"] Jan 04 12:09:57 crc kubenswrapper[4797]: W0104 12:09:57.734771 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc044e46d_b79e_4f22_be2d_98408745d63a.slice/crio-828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0 WatchSource:0}: Error finding container 828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0: Status 404 returned error can't find the container with id 828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0 Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.828482 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerStarted","Data":"828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0"} Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.855026 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerStarted","Data":"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898"} Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.872142 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ea5de376-7b66-40d0-8565-c8a34961540b","Type":"ContainerStarted","Data":"12058c4a0906fc2a08ff65ffb16bd6d5e11721e2d8738c74b84b3e1db759452a"} Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.912178 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.912152781 podStartE2EDuration="3.912152781s" podCreationTimestamp="2026-01-04 12:09:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:57.88517519 +0000 UTC m=+1296.742361889" watchObservedRunningTime="2026-01-04 12:09:57.912152781 +0000 UTC m=+1296.769339490" Jan 04 12:09:57 crc kubenswrapper[4797]: I0104 12:09:57.970312 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.218145 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.308363 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.308580 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="dnsmasq-dns" containerID="cri-o://768398bf394f51dda85b437f4cbfc44778f062e261e56f2697dc1c9e7ba28eca" gracePeriod=10 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.352076 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.453178 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.453453 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-central-agent" containerID="cri-o://6494e67ca81405b9e8f0d2cdc8b4a8992457dd364e1e91cec429b0d77957ad15" gracePeriod=30 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.454093 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="proxy-httpd" containerID="cri-o://71e576ed5630817ceaae48a3176ddc4109c71e8b7b908c72170ef6f0e9a4f7e6" gracePeriod=30 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.454161 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="sg-core" containerID="cri-o://da45682938b825cad4ca44b6d16c43030f0be18a5ad1272afb8461dde57df4a1" gracePeriod=30 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.454202 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-notification-agent" containerID="cri-o://1f780000cda1f9e5d097d614a4f6b4fc2b3a9e1ee1d4fb67355fe56852e25e6e" gracePeriod=30 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.474023 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.511806 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs\") pod \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512194 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle\") pod \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512252 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data\") pod \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512294 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwh6l\" (UniqueName: \"kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l\") pod \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512384 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs" (OuterVolumeSpecName: "logs") pod "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" (UID: "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512438 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom\") pod \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\" (UID: \"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb\") " Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.512912 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.520646 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l" (OuterVolumeSpecName: "kube-api-access-gwh6l") pod "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" (UID: "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb"). InnerVolumeSpecName "kube-api-access-gwh6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.537206 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" (UID: "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.594132 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" (UID: "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.599532 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data" (OuterVolumeSpecName: "config-data") pod "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" (UID: "09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.615078 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.615113 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.615127 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwh6l\" (UniqueName: \"kubernetes.io/projected/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-kube-api-access-gwh6l\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.615138 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.947349 4797 generic.go:334] "Generic (PLEG): container finished" podID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerID="b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09" exitCode=0 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.947763 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerDied","Data":"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.947793 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" event={"ID":"09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb","Type":"ContainerDied","Data":"f6cdcfa811c66ba4b4963d81cf42a3c55a3ff9f045dd5c22c35dcf0c8478c30a"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.947809 4797 scope.go:117] "RemoveContainer" containerID="b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.947937 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dc4bd8cbd-x28kr" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.965946 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerID="71e576ed5630817ceaae48a3176ddc4109c71e8b7b908c72170ef6f0e9a4f7e6" exitCode=0 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.965979 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerID="da45682938b825cad4ca44b6d16c43030f0be18a5ad1272afb8461dde57df4a1" exitCode=2 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.966037 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerDied","Data":"71e576ed5630817ceaae48a3176ddc4109c71e8b7b908c72170ef6f0e9a4f7e6"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.966065 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerDied","Data":"da45682938b825cad4ca44b6d16c43030f0be18a5ad1272afb8461dde57df4a1"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.967923 4797 generic.go:334] "Generic (PLEG): container finished" podID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerID="768398bf394f51dda85b437f4cbfc44778f062e261e56f2697dc1c9e7ba28eca" exitCode=0 Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.967968 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" event={"ID":"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f","Type":"ContainerDied","Data":"768398bf394f51dda85b437f4cbfc44778f062e261e56f2697dc1c9e7ba28eca"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.969128 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerStarted","Data":"418b73115c456075c6de1d26ac6b118315cd9948a61c16d32530742173952ea4"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.975802 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerStarted","Data":"b817e46fd4e00fefeca15f786375e9089980050aceb8fad138a6fef75a80c940"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.975844 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.975855 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerStarted","Data":"c24b57917dd5aa7e5ce4a5adee1907c75b34a49d88fc6eb6757c983dec5cfd3a"} Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.975865 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:09:58 crc kubenswrapper[4797]: I0104 12:09:58.994970 4797 scope.go:117] "RemoveContainer" containerID="24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.002302 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"] Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.021488 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5dc4bd8cbd-x28kr"] Jan 04 
12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.025733 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5746959b69-brph4" podStartSLOduration=3.025713652 podStartE2EDuration="3.025713652s" podCreationTimestamp="2026-01-04 12:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:59.005658924 +0000 UTC m=+1297.862845633" watchObservedRunningTime="2026-01-04 12:09:59.025713652 +0000 UTC m=+1297.882900361" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.180826 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.254903 4797 scope.go:117] "RemoveContainer" containerID="b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09" Jan 04 12:09:59 crc kubenswrapper[4797]: E0104 12:09:59.255539 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09\": container with ID starting with b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09 not found: ID does not exist" containerID="b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.255567 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09"} err="failed to get container status \"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09\": rpc error: code = NotFound desc = could not find container \"b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09\": container with ID starting with b21c93c95d5f69976ab7f6836a72505244442e5f1d0822b444bafd56a0c98d09 not found: ID does not exist" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.255588 4797 scope.go:117] "RemoveContainer" containerID="24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c" Jan 04 12:09:59 crc kubenswrapper[4797]: E0104 12:09:59.255926 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c\": container with ID starting with 24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c not found: ID does not exist" containerID="24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.255941 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c"} err="failed to get container status \"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c\": rpc error: code = NotFound desc = could not find container \"24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c\": container with ID starting with 24afe1388391a6ecfb4c6852d64b4f00e8707ee8a02b1ee801a1d7c29fcbda2c not found: ID does not exist" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325673 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlqxq\" (UniqueName: \"kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: 
\"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325744 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325784 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325899 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325935 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.325998 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb\") pod \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\" (UID: \"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f\") " Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.332358 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq" (OuterVolumeSpecName: "kube-api-access-xlqxq") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "kube-api-access-xlqxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.376257 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.389589 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.407665 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.409882 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.413750 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config" (OuterVolumeSpecName: "config") pod "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" (UID: "54e53ee7-baf9-4c90-b1ec-0752a3ecb66f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427701 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlqxq\" (UniqueName: \"kubernetes.io/projected/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-kube-api-access-xlqxq\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427731 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427741 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427750 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427758 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.427767 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:59 crc kubenswrapper[4797]: I0104 12:09:59.486958 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" path="/var/lib/kubelet/pods/09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb/volumes" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.000199 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerID="6494e67ca81405b9e8f0d2cdc8b4a8992457dd364e1e91cec429b0d77957ad15" exitCode=0 Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.000292 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerDied","Data":"6494e67ca81405b9e8f0d2cdc8b4a8992457dd364e1e91cec429b0d77957ad15"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.005320 4797 generic.go:334] "Generic (PLEG): container finished" podID="298c44d2-329d-4207-b334-af984980c565" containerID="39cd871a2cdd33c13df6a2d820e492302dd31e6705fb1d98b69638efa2161416" exitCode=0 
Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.005404 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerDied","Data":"39cd871a2cdd33c13df6a2d820e492302dd31e6705fb1d98b69638efa2161416"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.005436 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f46ffb4-b7hph" event={"ID":"298c44d2-329d-4207-b334-af984980c565","Type":"ContainerDied","Data":"626d7d88529d7b29071edfac1503cc650c4455e600bac509ae6256dac0d5c888"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.005451 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="626d7d88529d7b29071edfac1503cc650c4455e600bac509ae6256dac0d5c888" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.007245 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" event={"ID":"54e53ee7-baf9-4c90-b1ec-0752a3ecb66f","Type":"ContainerDied","Data":"0b7ef079ccfcc5dd325cec9d362a79f807610051a5e39d6e4a48339e282a7713"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.007290 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75b8bc4b57-m8l7k" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.007309 4797 scope.go:117] "RemoveContainer" containerID="768398bf394f51dda85b437f4cbfc44778f062e261e56f2697dc1c9e7ba28eca" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.017226 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerStarted","Data":"1881930c5a9519ddc49465313777fe56cca8210c13be4a63ce3f9a3424e5e91c"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.017263 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerStarted","Data":"1105f111e259358a7b341195cb86cefe7de4dadda4bb97857fb143d53d1d387e"} Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.026281 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.035170 4797 scope.go:117] "RemoveContainer" containerID="321419da87541bdfc25ccdaa780c8344bc2abebbdd0d71e2ab565d0208150fcb" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.040520 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.061325 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75b8bc4b57-m8l7k"] Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.077823 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.077807165 podStartE2EDuration="4.077807165s" podCreationTimestamp="2026-01-04 12:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:00.076425828 +0000 UTC m=+1298.933612537" watchObservedRunningTime="2026-01-04 12:10:00.077807165 +0000 UTC m=+1298.934993874" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.145872 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config\") pod \"298c44d2-329d-4207-b334-af984980c565\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.145943 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config\") pod \"298c44d2-329d-4207-b334-af984980c565\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.145970 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9l7l\" (UniqueName: \"kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l\") pod \"298c44d2-329d-4207-b334-af984980c565\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.146011 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle\") pod \"298c44d2-329d-4207-b334-af984980c565\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.146027 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs\") pod \"298c44d2-329d-4207-b334-af984980c565\" (UID: \"298c44d2-329d-4207-b334-af984980c565\") " Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.163408 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l" (OuterVolumeSpecName: "kube-api-access-x9l7l") pod "298c44d2-329d-4207-b334-af984980c565" (UID: "298c44d2-329d-4207-b334-af984980c565"). InnerVolumeSpecName "kube-api-access-x9l7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.177229 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "298c44d2-329d-4207-b334-af984980c565" (UID: "298c44d2-329d-4207-b334-af984980c565"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.230112 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "298c44d2-329d-4207-b334-af984980c565" (UID: "298c44d2-329d-4207-b334-af984980c565"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.234701 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config" (OuterVolumeSpecName: "config") pod "298c44d2-329d-4207-b334-af984980c565" (UID: "298c44d2-329d-4207-b334-af984980c565"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.250025 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.250050 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.250062 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9l7l\" (UniqueName: \"kubernetes.io/projected/298c44d2-329d-4207-b334-af984980c565-kube-api-access-x9l7l\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.250070 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.255114 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "298c44d2-329d-4207-b334-af984980c565" (UID: "298c44d2-329d-4207-b334-af984980c565"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.351197 4797 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/298c44d2-329d-4207-b334-af984980c565-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:00 crc kubenswrapper[4797]: I0104 12:10:00.523145 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 04 12:10:01 crc kubenswrapper[4797]: I0104 12:10:01.036059 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-569f46ffb4-b7hph" Jan 04 12:10:01 crc kubenswrapper[4797]: I0104 12:10:01.069449 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:10:01 crc kubenswrapper[4797]: I0104 12:10:01.076783 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-569f46ffb4-b7hph"] Jan 04 12:10:01 crc kubenswrapper[4797]: I0104 12:10:01.488220 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="298c44d2-329d-4207-b334-af984980c565" path="/var/lib/kubelet/pods/298c44d2-329d-4207-b334-af984980c565/volumes" Jan 04 12:10:01 crc kubenswrapper[4797]: I0104 12:10:01.488879 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" path="/var/lib/kubelet/pods/54e53ee7-baf9-4c90-b1ec-0752a3ecb66f/volumes" Jan 04 12:10:02 crc kubenswrapper[4797]: I0104 12:10:02.419538 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.065560 4797 generic.go:334] "Generic (PLEG): container finished" podID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerID="1f780000cda1f9e5d097d614a4f6b4fc2b3a9e1ee1d4fb67355fe56852e25e6e" exitCode=0 Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.065645 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerDied","Data":"1f780000cda1f9e5d097d614a4f6b4fc2b3a9e1ee1d4fb67355fe56852e25e6e"} Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.253787 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.253845 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.294240 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:04 crc kubenswrapper[4797]: I0104 12:10:04.296853 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.073560 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.073873 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.164272 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.164312 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.212219 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:10:05 crc kubenswrapper[4797]: I0104 12:10:05.212275 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:10:06 crc kubenswrapper[4797]: I0104 12:10:06.082846 4797 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:10:06 crc kubenswrapper[4797]: I0104 12:10:06.083052 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.185746 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.186812 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.385743 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.385829 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.437922 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:07 crc kubenswrapper[4797]: I0104 12:10:07.760047 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 04 12:10:08 crc kubenswrapper[4797]: I0104 12:10:08.108026 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:08 crc kubenswrapper[4797]: I0104 12:10:08.108363 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:08 crc kubenswrapper[4797]: I0104 12:10:08.286197 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:09 crc kubenswrapper[4797]: I0104 12:10:09.065144 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:09 crc kubenswrapper[4797]: I0104 12:10:09.065399 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" containerName="kube-state-metrics" containerID="cri-o://5cf863e6c4e567a92860c363b3a8fa6be4741f27258593aecd9da7c965fb66a2" gracePeriod=30 Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.113202 4797 generic.go:334] "Generic (PLEG): container finished" podID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" containerID="5cf863e6c4e567a92860c363b3a8fa6be4741f27258593aecd9da7c965fb66a2" exitCode=2 Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.113558 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"681bdc49-0a76-4a29-b7c0-1f4e051636fb","Type":"ContainerDied","Data":"5cf863e6c4e567a92860c363b3a8fa6be4741f27258593aecd9da7c965fb66a2"} Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.342690 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.474468 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480187 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480258 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480308 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480407 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd5g7\" (UniqueName: \"kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480434 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480520 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.480553 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd\") pod \"ef8e3a41-8586-4918-b62a-0e1b49eab563\" (UID: \"ef8e3a41-8586-4918-b62a-0e1b49eab563\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.481330 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.482438 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.488645 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7" (OuterVolumeSpecName: "kube-api-access-zd5g7") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "kube-api-access-zd5g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.501711 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts" (OuterVolumeSpecName: "scripts") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.508553 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.575150 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.582694 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc2zn\" (UniqueName: \"kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn\") pod \"681bdc49-0a76-4a29-b7c0-1f4e051636fb\" (UID: \"681bdc49-0a76-4a29-b7c0-1f4e051636fb\") " Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.583122 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.583146 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.583159 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd5g7\" (UniqueName: \"kubernetes.io/projected/ef8e3a41-8586-4918-b62a-0e1b49eab563-kube-api-access-zd5g7\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.583172 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.583183 4797 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc 
kubenswrapper[4797]: I0104 12:10:10.583190 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef8e3a41-8586-4918-b62a-0e1b49eab563-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.585288 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn" (OuterVolumeSpecName: "kube-api-access-jc2zn") pod "681bdc49-0a76-4a29-b7c0-1f4e051636fb" (UID: "681bdc49-0a76-4a29-b7c0-1f4e051636fb"). InnerVolumeSpecName "kube-api-access-jc2zn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.596338 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data" (OuterVolumeSpecName: "config-data") pod "ef8e3a41-8586-4918-b62a-0e1b49eab563" (UID: "ef8e3a41-8586-4918-b62a-0e1b49eab563"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.685312 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc2zn\" (UniqueName: \"kubernetes.io/projected/681bdc49-0a76-4a29-b7c0-1f4e051636fb-kube-api-access-jc2zn\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:10 crc kubenswrapper[4797]: I0104 12:10:10.685345 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef8e3a41-8586-4918-b62a-0e1b49eab563-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.127595 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"681bdc49-0a76-4a29-b7c0-1f4e051636fb","Type":"ContainerDied","Data":"854343d9ae5b1d91fb65167e001cc57e27f45cecb00e67a23ec93771a6401a24"} Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.127909 4797 scope.go:117] "RemoveContainer" containerID="5cf863e6c4e567a92860c363b3a8fa6be4741f27258593aecd9da7c965fb66a2" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.127640 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.150335 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef8e3a41-8586-4918-b62a-0e1b49eab563","Type":"ContainerDied","Data":"432d603f2c60fc65a6d0bd18d71fa0ab03a4078476c0d6dddb9419629aa44da3"} Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.150407 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.167551 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ea5de376-7b66-40d0-8565-c8a34961540b","Type":"ContainerStarted","Data":"a95bc6ae38195a799c2eec6d7e3b1fc38748ec8ad711d67378d10d8eb96a34a6"} Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.193096 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.938611748 podStartE2EDuration="16.193075759s" podCreationTimestamp="2026-01-04 12:09:55 +0000 UTC" firstStartedPulling="2026-01-04 12:09:56.923330745 +0000 UTC m=+1295.780517454" lastFinishedPulling="2026-01-04 12:10:10.177794756 +0000 UTC m=+1309.034981465" observedRunningTime="2026-01-04 12:10:11.189896015 +0000 UTC m=+1310.047082724" watchObservedRunningTime="2026-01-04 12:10:11.193075759 +0000 UTC m=+1310.050262468" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.252784 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-9k482"] Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253191 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253208 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253217 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253223 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253238 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api-log" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253246 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api-log" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253264 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="init" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253269 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="init" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253279 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="sg-core" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253285 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="sg-core" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253297 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="dnsmasq-dns" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253309 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="dnsmasq-dns" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253321 4797 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-api" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253327 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-api" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253336 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" containerName="kube-state-metrics" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253342 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" containerName="kube-state-metrics" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253352 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-central-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253358 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-central-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253368 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="proxy-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253374 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="proxy-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: E0104 12:10:11.253384 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-notification-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253399 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-notification-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253551 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="proxy-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253562 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-notification-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253573 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-httpd" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253584 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="ceilometer-central-agent" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253595 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" containerName="barbican-api-log" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253608 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" containerName="kube-state-metrics" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253617 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="sg-core" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253628 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="09fa15e9-1f16-4b21-bdcd-7e35cc7a50fb" 
containerName="barbican-api" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253637 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="54e53ee7-baf9-4c90-b1ec-0752a3ecb66f" containerName="dnsmasq-dns" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.253648 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="298c44d2-329d-4207-b334-af984980c565" containerName="neutron-api" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.254207 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.284163 4797 scope.go:117] "RemoveContainer" containerID="71e576ed5630817ceaae48a3176ddc4109c71e8b7b908c72170ef6f0e9a4f7e6" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.285710 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.298959 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9k482"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.305542 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89c7b\" (UniqueName: \"kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.305590 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.320045 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.340158 4797 scope.go:117] "RemoveContainer" containerID="da45682938b825cad4ca44b6d16c43030f0be18a5ad1272afb8461dde57df4a1" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.340276 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.355927 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.364452 4797 scope.go:117] "RemoveContainer" containerID="1f780000cda1f9e5d097d614a4f6b4fc2b3a9e1ee1d4fb67355fe56852e25e6e" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.369501 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.371675 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.373420 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jkpzj" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.374594 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.374874 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.374940 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.384939 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.391065 4797 scope.go:117] "RemoveContainer" containerID="6494e67ca81405b9e8f0d2cdc8b4a8992457dd364e1e91cec429b0d77957ad15" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.393387 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.394644 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.397407 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.398064 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.402519 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.406649 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89c7b\" (UniqueName: \"kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.407045 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.408218 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.412848 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-tlmxr"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.414372 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.426231 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tlmxr"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.436415 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89c7b\" (UniqueName: \"kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b\") pod \"nova-api-db-create-9k482\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.484536 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="681bdc49-0a76-4a29-b7c0-1f4e051636fb" path="/var/lib/kubelet/pods/681bdc49-0a76-4a29-b7c0-1f4e051636fb/volumes" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.486485 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" path="/var/lib/kubelet/pods/ef8e3a41-8586-4918-b62a-0e1b49eab563/volumes" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.491076 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-srpmm"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.492538 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512450 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjl49\" (UniqueName: \"kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512490 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512518 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512555 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512606 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512624 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512644 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512665 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfw52\" (UniqueName: \"kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.512804 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-efba-account-create-update-mvjk5"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.513690 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.513729 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.513945 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.514248 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.514274 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.516102 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.526936 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-srpmm"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.546265 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-efba-account-create-update-mvjk5"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.571320 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616286 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616334 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616368 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jql75\" (UniqueName: \"kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616391 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616452 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616473 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616491 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjl49\" (UniqueName: \"kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616513 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616537 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc 
kubenswrapper[4797]: I0104 12:10:11.616562 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616593 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krccd\" (UniqueName: \"kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616613 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616630 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb2fg\" (UniqueName: \"kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616649 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616670 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616729 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfw52\" (UniqueName: \"kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616766 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.616797 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.622897 4797 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.625617 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.626692 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.631696 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.634947 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.635521 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.645165 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.645553 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.651653 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.653245 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 
12:10:11.653546 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjl49\" (UniqueName: \"kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49\") pod \"ceilometer-0\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.656564 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfw52\" (UniqueName: \"kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52\") pod \"kube-state-metrics-0\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.668673 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-fc00-account-create-update-nssh5"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.678496 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.686637 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.693171 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.697769 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-fc00-account-create-update-nssh5"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.712723 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728609 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krccd\" (UniqueName: \"kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728665 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb2fg\" (UniqueName: \"kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728721 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728838 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728900 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.728962 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgzm5\" (UniqueName: \"kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.729056 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jql75\" (UniqueName: \"kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.729161 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.730106 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.730113 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.730258 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.746673 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb2fg\" (UniqueName: \"kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg\") pod \"nova-api-efba-account-create-update-mvjk5\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.750655 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krccd\" (UniqueName: \"kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd\") pod \"nova-cell0-db-create-tlmxr\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 
12:10:11.751927 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jql75\" (UniqueName: \"kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75\") pod \"nova-cell1-db-create-srpmm\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.788245 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.816452 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.832663 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.838714 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgzm5\" (UniqueName: \"kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.838851 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.839644 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.858143 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgzm5\" (UniqueName: \"kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5\") pod \"nova-cell0-fc00-account-create-update-nssh5\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.870635 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9070-account-create-update-tf4w9"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.872054 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.876740 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.879840 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9070-account-create-update-tf4w9"] Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.941621 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:11 crc kubenswrapper[4797]: I0104 12:10:11.941678 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm2h2\" (UniqueName: \"kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.043362 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm2h2\" (UniqueName: \"kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.043517 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.044209 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.059965 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.065801 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm2h2\" (UniqueName: \"kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2\") pod \"nova-cell1-9070-account-create-update-tf4w9\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.117248 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9k482"] Jan 04 12:10:12 crc kubenswrapper[4797]: W0104 12:10:12.121869 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod255b543b_ea95_457a_a7b5_63190019b8e8.slice/crio-6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d WatchSource:0}: Error finding container 6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d: Status 404 returned error can't find the container with id 6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.196737 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.225669 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9k482" event={"ID":"255b543b-ea95-457a-a7b5-63190019b8e8","Type":"ContainerStarted","Data":"6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d"} Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.327923 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.339959 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.491671 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-efba-account-create-update-mvjk5"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.552722 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tlmxr"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.724434 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-srpmm"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.849136 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9070-account-create-update-tf4w9"] Jan 04 12:10:12 crc kubenswrapper[4797]: I0104 12:10:12.888638 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-fc00-account-create-update-nssh5"] Jan 04 12:10:12 crc kubenswrapper[4797]: W0104 12:10:12.898087 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36839b2d_0307_41a0_bf7c_c37d9280d5be.slice/crio-60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a WatchSource:0}: Error finding container 60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a: Status 404 returned error can't find the container with id 60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.241214 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"35e31bb2-dc54-40fe-843a-6a89d4e91dda","Type":"ContainerStarted","Data":"98a71f0894e170f51a1fa453465be098592028fea532f5129370897949ccf2aa"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.242884 4797 generic.go:334] "Generic (PLEG): container finished" podID="70761e1c-8caa-438d-b87d-c5f771e56ade" containerID="117875a5dbf3fa596fd25719a04e4362dc7c701dac7b932c7f73f7d2f7984393" exitCode=0 Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.242928 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tlmxr" event={"ID":"70761e1c-8caa-438d-b87d-c5f771e56ade","Type":"ContainerDied","Data":"117875a5dbf3fa596fd25719a04e4362dc7c701dac7b932c7f73f7d2f7984393"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.242968 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tlmxr" event={"ID":"70761e1c-8caa-438d-b87d-c5f771e56ade","Type":"ContainerStarted","Data":"6e35df40a22fd9946f1f1ec9c756d148c1e9133b596cabd7499861dfdfbb3072"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.245668 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerStarted","Data":"884a142602865844ae6b336635ee6f7a078933be1361dfdcc3ba9d1ae46c2e55"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.247211 4797 generic.go:334] "Generic (PLEG): container finished" podID="0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" containerID="c848b4f27d4bb207ec3d3f60c0c0cbc9f2de9c8bbbe894359e2672858f039bfa" exitCode=0 Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.247269 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-efba-account-create-update-mvjk5" event={"ID":"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee","Type":"ContainerDied","Data":"c848b4f27d4bb207ec3d3f60c0c0cbc9f2de9c8bbbe894359e2672858f039bfa"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.247349 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-efba-account-create-update-mvjk5" event={"ID":"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee","Type":"ContainerStarted","Data":"32728d1241ea6a9fce5d628c50f029ef76d2cbe184d9d15403ae9c935cd47d27"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.253248 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" event={"ID":"36839b2d-0307-41a0-bf7c-c37d9280d5be","Type":"ContainerStarted","Data":"60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.255319 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" event={"ID":"87e304fe-91cd-4ef9-841f-dd4fe87b6b35","Type":"ContainerStarted","Data":"7e1eddd1b5f4a635357f26aae57dd6695607081ce13586c804e68437a5c9a5b2"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.257099 4797 generic.go:334] "Generic (PLEG): container finished" podID="3a1a1ecb-6333-434f-a843-4541ddcc9f48" containerID="ec3131d9785fd559466d8e3cfc18069ec035951436574a4a4e09ab51a6f9b604" exitCode=0 Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.257144 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-srpmm" event={"ID":"3a1a1ecb-6333-434f-a843-4541ddcc9f48","Type":"ContainerDied","Data":"ec3131d9785fd559466d8e3cfc18069ec035951436574a4a4e09ab51a6f9b604"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 
12:10:13.257166 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-srpmm" event={"ID":"3a1a1ecb-6333-434f-a843-4541ddcc9f48","Type":"ContainerStarted","Data":"302a7a78d6a1af7a576be468e37353df791a49981ec2a5a6075a2906e3b147a7"} Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.259892 4797 generic.go:334] "Generic (PLEG): container finished" podID="255b543b-ea95-457a-a7b5-63190019b8e8" containerID="41c17a83276d3c0e069dd494d2af4a5622ebd15603f8ccc377515d895cb90413" exitCode=0 Jan 04 12:10:13 crc kubenswrapper[4797]: I0104 12:10:13.259942 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9k482" event={"ID":"255b543b-ea95-457a-a7b5-63190019b8e8","Type":"ContainerDied","Data":"41c17a83276d3c0e069dd494d2af4a5622ebd15603f8ccc377515d895cb90413"} Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.286568 4797 generic.go:334] "Generic (PLEG): container finished" podID="87e304fe-91cd-4ef9-841f-dd4fe87b6b35" containerID="959c3fabe5eb13450f485c5aa1971f64bf108f7cbc0ac7321eca9fbcad43737f" exitCode=0 Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.286664 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" event={"ID":"87e304fe-91cd-4ef9-841f-dd4fe87b6b35","Type":"ContainerDied","Data":"959c3fabe5eb13450f485c5aa1971f64bf108f7cbc0ac7321eca9fbcad43737f"} Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.293566 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"35e31bb2-dc54-40fe-843a-6a89d4e91dda","Type":"ContainerStarted","Data":"2e3eef8a595abeb6c08dda235b265327b59e733829fd72a94fa5205605eee82c"} Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.293697 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.296371 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerStarted","Data":"4289ee5b6caba0fa5c84f85fa37399617b6ceda96b0013fd7c48e5c5242022b4"} Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.299087 4797 generic.go:334] "Generic (PLEG): container finished" podID="36839b2d-0307-41a0-bf7c-c37d9280d5be" containerID="cc5cbdf1d8378431ace1897d3cb28372fb4bfa6af945687726bc9081a3f8a25e" exitCode=0 Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.299321 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" event={"ID":"36839b2d-0307-41a0-bf7c-c37d9280d5be","Type":"ContainerDied","Data":"cc5cbdf1d8378431ace1897d3cb28372fb4bfa6af945687726bc9081a3f8a25e"} Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.348826 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.80941262 podStartE2EDuration="3.348801752s" podCreationTimestamp="2026-01-04 12:10:11 +0000 UTC" firstStartedPulling="2026-01-04 12:10:12.324584764 +0000 UTC m=+1311.181771473" lastFinishedPulling="2026-01-04 12:10:12.863973896 +0000 UTC m=+1311.721160605" observedRunningTime="2026-01-04 12:10:14.325235871 +0000 UTC m=+1313.182422580" watchObservedRunningTime="2026-01-04 12:10:14.348801752 +0000 UTC m=+1313.205988471" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.669831 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.699763 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jql75\" (UniqueName: \"kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75\") pod \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.699884 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts\") pod \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\" (UID: \"3a1a1ecb-6333-434f-a843-4541ddcc9f48\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.700939 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3a1a1ecb-6333-434f-a843-4541ddcc9f48" (UID: "3a1a1ecb-6333-434f-a843-4541ddcc9f48"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.705272 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75" (OuterVolumeSpecName: "kube-api-access-jql75") pod "3a1a1ecb-6333-434f-a843-4541ddcc9f48" (UID: "3a1a1ecb-6333-434f-a843-4541ddcc9f48"). InnerVolumeSpecName "kube-api-access-jql75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.802073 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a1a1ecb-6333-434f-a843-4541ddcc9f48-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.802118 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jql75\" (UniqueName: \"kubernetes.io/projected/3a1a1ecb-6333-434f-a843-4541ddcc9f48-kube-api-access-jql75\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.871687 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.885749 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.905172 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89c7b\" (UniqueName: \"kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b\") pod \"255b543b-ea95-457a-a7b5-63190019b8e8\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.905292 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts\") pod \"70761e1c-8caa-438d-b87d-c5f771e56ade\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.905366 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krccd\" (UniqueName: \"kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd\") pod \"70761e1c-8caa-438d-b87d-c5f771e56ade\" (UID: \"70761e1c-8caa-438d-b87d-c5f771e56ade\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.905383 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts\") pod \"255b543b-ea95-457a-a7b5-63190019b8e8\" (UID: \"255b543b-ea95-457a-a7b5-63190019b8e8\") " Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.906249 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "255b543b-ea95-457a-a7b5-63190019b8e8" (UID: "255b543b-ea95-457a-a7b5-63190019b8e8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.914277 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70761e1c-8caa-438d-b87d-c5f771e56ade" (UID: "70761e1c-8caa-438d-b87d-c5f771e56ade"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.916253 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd" (OuterVolumeSpecName: "kube-api-access-krccd") pod "70761e1c-8caa-438d-b87d-c5f771e56ade" (UID: "70761e1c-8caa-438d-b87d-c5f771e56ade"). InnerVolumeSpecName "kube-api-access-krccd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.916519 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b" (OuterVolumeSpecName: "kube-api-access-89c7b") pod "255b543b-ea95-457a-a7b5-63190019b8e8" (UID: "255b543b-ea95-457a-a7b5-63190019b8e8"). InnerVolumeSpecName "kube-api-access-89c7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[4797]: I0104 12:10:14.927943 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.006473 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb2fg\" (UniqueName: \"kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg\") pod \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.006645 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts\") pod \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\" (UID: \"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.007160 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70761e1c-8caa-438d-b87d-c5f771e56ade-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.007178 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krccd\" (UniqueName: \"kubernetes.io/projected/70761e1c-8caa-438d-b87d-c5f771e56ade-kube-api-access-krccd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.007190 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/255b543b-ea95-457a-a7b5-63190019b8e8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.007199 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89c7b\" (UniqueName: \"kubernetes.io/projected/255b543b-ea95-457a-a7b5-63190019b8e8-kube-api-access-89c7b\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.007627 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" (UID: "0bd9c381-3736-4813-87e0-eb9ebaa6b8ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.010428 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg" (OuterVolumeSpecName: "kube-api-access-zb2fg") pod "0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" (UID: "0bd9c381-3736-4813-87e0-eb9ebaa6b8ee"). InnerVolumeSpecName "kube-api-access-zb2fg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.107923 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.107952 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb2fg\" (UniqueName: \"kubernetes.io/projected/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee-kube-api-access-zb2fg\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.318418 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerStarted","Data":"df6267d8ab3008e7dca79d20574da67d3fbdb36b8588822d88d4d16ad8981b1f"} Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.322667 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-efba-account-create-update-mvjk5" event={"ID":"0bd9c381-3736-4813-87e0-eb9ebaa6b8ee","Type":"ContainerDied","Data":"32728d1241ea6a9fce5d628c50f029ef76d2cbe184d9d15403ae9c935cd47d27"} Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.322756 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32728d1241ea6a9fce5d628c50f029ef76d2cbe184d9d15403ae9c935cd47d27" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.322704 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-efba-account-create-update-mvjk5" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.325146 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-srpmm" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.325138 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-srpmm" event={"ID":"3a1a1ecb-6333-434f-a843-4541ddcc9f48","Type":"ContainerDied","Data":"302a7a78d6a1af7a576be468e37353df791a49981ec2a5a6075a2906e3b147a7"} Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.325327 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="302a7a78d6a1af7a576be468e37353df791a49981ec2a5a6075a2906e3b147a7" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.327160 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9k482" event={"ID":"255b543b-ea95-457a-a7b5-63190019b8e8","Type":"ContainerDied","Data":"6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d"} Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.327200 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c3e95533eee6c8cbd65d5236c022cf8932e7ff338a6d134e69696050c9fdc6d" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.327268 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9k482" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.338543 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tlmxr" event={"ID":"70761e1c-8caa-438d-b87d-c5f771e56ade","Type":"ContainerDied","Data":"6e35df40a22fd9946f1f1ec9c756d148c1e9133b596cabd7499861dfdfbb3072"} Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.338624 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e35df40a22fd9946f1f1ec9c756d148c1e9133b596cabd7499861dfdfbb3072" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.338724 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tlmxr" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.803635 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.809714 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.932657 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgzm5\" (UniqueName: \"kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5\") pod \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.932861 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm2h2\" (UniqueName: \"kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2\") pod \"36839b2d-0307-41a0-bf7c-c37d9280d5be\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.932895 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts\") pod \"36839b2d-0307-41a0-bf7c-c37d9280d5be\" (UID: \"36839b2d-0307-41a0-bf7c-c37d9280d5be\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.932943 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts\") pod \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\" (UID: \"87e304fe-91cd-4ef9-841f-dd4fe87b6b35\") " Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.933793 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87e304fe-91cd-4ef9-841f-dd4fe87b6b35" (UID: "87e304fe-91cd-4ef9-841f-dd4fe87b6b35"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.934166 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "36839b2d-0307-41a0-bf7c-c37d9280d5be" (UID: "36839b2d-0307-41a0-bf7c-c37d9280d5be"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.967286 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5" (OuterVolumeSpecName: "kube-api-access-bgzm5") pod "87e304fe-91cd-4ef9-841f-dd4fe87b6b35" (UID: "87e304fe-91cd-4ef9-841f-dd4fe87b6b35"). InnerVolumeSpecName "kube-api-access-bgzm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:15 crc kubenswrapper[4797]: I0104 12:10:15.967371 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2" (OuterVolumeSpecName: "kube-api-access-nm2h2") pod "36839b2d-0307-41a0-bf7c-c37d9280d5be" (UID: "36839b2d-0307-41a0-bf7c-c37d9280d5be"). InnerVolumeSpecName "kube-api-access-nm2h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.036090 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm2h2\" (UniqueName: \"kubernetes.io/projected/36839b2d-0307-41a0-bf7c-c37d9280d5be-kube-api-access-nm2h2\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.036129 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36839b2d-0307-41a0-bf7c-c37d9280d5be-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.037479 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.037528 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgzm5\" (UniqueName: \"kubernetes.io/projected/87e304fe-91cd-4ef9-841f-dd4fe87b6b35-kube-api-access-bgzm5\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.348915 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerStarted","Data":"0e2ddbf9a7bd44afbac17c6df1376e5e608d846f49e24aaa51b0dfc68b099125"} Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.350627 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" event={"ID":"36839b2d-0307-41a0-bf7c-c37d9280d5be","Type":"ContainerDied","Data":"60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a"} Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.350669 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60e69c426b983dde25d8be08077285f6a17a5082c9cfd1430930bda7d5012f4a" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.350684 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-9070-account-create-update-tf4w9" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.351906 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" event={"ID":"87e304fe-91cd-4ef9-841f-dd4fe87b6b35","Type":"ContainerDied","Data":"7e1eddd1b5f4a635357f26aae57dd6695607081ce13586c804e68437a5c9a5b2"} Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.351941 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e1eddd1b5f4a635357f26aae57dd6695607081ce13586c804e68437a5c9a5b2" Jan 04 12:10:16 crc kubenswrapper[4797]: I0104 12:10:16.352023 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fc00-account-create-update-nssh5" Jan 04 12:10:17 crc kubenswrapper[4797]: I0104 12:10:17.362859 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerStarted","Data":"2028d7b3ea55ca0534841bac5af40ec09a2dfef1f70c50fd1ee0483825907bef"} Jan 04 12:10:17 crc kubenswrapper[4797]: I0104 12:10:17.363348 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:10:17 crc kubenswrapper[4797]: I0104 12:10:17.403322 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.094875242 podStartE2EDuration="6.403301387s" podCreationTimestamp="2026-01-04 12:10:11 +0000 UTC" firstStartedPulling="2026-01-04 12:10:12.344889609 +0000 UTC m=+1311.202076318" lastFinishedPulling="2026-01-04 12:10:16.653315754 +0000 UTC m=+1315.510502463" observedRunningTime="2026-01-04 12:10:17.395747738 +0000 UTC m=+1316.252934537" watchObservedRunningTime="2026-01-04 12:10:17.403301387 +0000 UTC m=+1316.260488096" Jan 04 12:10:19 crc kubenswrapper[4797]: I0104 12:10:19.493249 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:10:19 crc kubenswrapper[4797]: I0104 12:10:19.493555 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.721864 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861211 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wjgc4"] Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861635 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70761e1c-8caa-438d-b87d-c5f771e56ade" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861654 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="70761e1c-8caa-438d-b87d-c5f771e56ade" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861672 4797 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="87e304fe-91cd-4ef9-841f-dd4fe87b6b35" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861680 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="87e304fe-91cd-4ef9-841f-dd4fe87b6b35" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861698 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="255b543b-ea95-457a-a7b5-63190019b8e8" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861705 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="255b543b-ea95-457a-a7b5-63190019b8e8" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861715 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861722 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861730 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a1a1ecb-6333-434f-a843-4541ddcc9f48" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861736 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a1a1ecb-6333-434f-a843-4541ddcc9f48" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: E0104 12:10:21.861743 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36839b2d-0307-41a0-bf7c-c37d9280d5be" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861750 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="36839b2d-0307-41a0-bf7c-c37d9280d5be" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861944 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="36839b2d-0307-41a0-bf7c-c37d9280d5be" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861961 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="70761e1c-8caa-438d-b87d-c5f771e56ade" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.861978 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="87e304fe-91cd-4ef9-841f-dd4fe87b6b35" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.862014 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="255b543b-ea95-457a-a7b5-63190019b8e8" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.862031 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a1a1ecb-6333-434f-a843-4541ddcc9f48" containerName="mariadb-database-create" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.862039 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" containerName="mariadb-account-create-update" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.862723 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.866033 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-h575v" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.866369 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.867017 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 04 12:10:21 crc kubenswrapper[4797]: I0104 12:10:21.871321 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wjgc4"] Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.052275 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.052343 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.052376 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6xww\" (UniqueName: \"kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.052482 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.153565 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.153661 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.153711 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: 
\"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.153745 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6xww\" (UniqueName: \"kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.160061 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.160587 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.167680 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.173294 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6xww\" (UniqueName: \"kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww\") pod \"nova-cell0-conductor-db-sync-wjgc4\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.181627 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:22 crc kubenswrapper[4797]: I0104 12:10:22.699795 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wjgc4"] Jan 04 12:10:23 crc kubenswrapper[4797]: I0104 12:10:23.427338 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" event={"ID":"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb","Type":"ContainerStarted","Data":"a6db5f1a814f783eaebb0c5a6a72c6ba5abc35ad9671db5150195f982fd22c45"} Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.029222 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.029860 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="sg-core" containerID="cri-o://0e2ddbf9a7bd44afbac17c6df1376e5e608d846f49e24aaa51b0dfc68b099125" gracePeriod=30 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.029862 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="proxy-httpd" containerID="cri-o://2028d7b3ea55ca0534841bac5af40ec09a2dfef1f70c50fd1ee0483825907bef" gracePeriod=30 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.029903 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-notification-agent" containerID="cri-o://df6267d8ab3008e7dca79d20574da67d3fbdb36b8588822d88d4d16ad8981b1f" gracePeriod=30 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.030664 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-central-agent" containerID="cri-o://4289ee5b6caba0fa5c84f85fa37399617b6ceda96b0013fd7c48e5c5242022b4" gracePeriod=30 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445206 4797 generic.go:334] "Generic (PLEG): container finished" podID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerID="2028d7b3ea55ca0534841bac5af40ec09a2dfef1f70c50fd1ee0483825907bef" exitCode=0 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445237 4797 generic.go:334] "Generic (PLEG): container finished" podID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerID="0e2ddbf9a7bd44afbac17c6df1376e5e608d846f49e24aaa51b0dfc68b099125" exitCode=2 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445249 4797 generic.go:334] "Generic (PLEG): container finished" podID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerID="4289ee5b6caba0fa5c84f85fa37399617b6ceda96b0013fd7c48e5c5242022b4" exitCode=0 Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445269 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerDied","Data":"2028d7b3ea55ca0534841bac5af40ec09a2dfef1f70c50fd1ee0483825907bef"} Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445295 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerDied","Data":"0e2ddbf9a7bd44afbac17c6df1376e5e608d846f49e24aaa51b0dfc68b099125"} Jan 04 12:10:24 crc kubenswrapper[4797]: I0104 12:10:24.445331 4797 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerDied","Data":"4289ee5b6caba0fa5c84f85fa37399617b6ceda96b0013fd7c48e5c5242022b4"} Jan 04 12:10:26 crc kubenswrapper[4797]: I0104 12:10:26.465713 4797 generic.go:334] "Generic (PLEG): container finished" podID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerID="df6267d8ab3008e7dca79d20574da67d3fbdb36b8588822d88d4d16ad8981b1f" exitCode=0 Jan 04 12:10:26 crc kubenswrapper[4797]: I0104 12:10:26.465795 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerDied","Data":"df6267d8ab3008e7dca79d20574da67d3fbdb36b8588822d88d4d16ad8981b1f"} Jan 04 12:10:28 crc kubenswrapper[4797]: I0104 12:10:28.361333 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:28 crc kubenswrapper[4797]: I0104 12:10:28.361607 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-log" containerID="cri-o://4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989" gracePeriod=30 Jan 04 12:10:28 crc kubenswrapper[4797]: I0104 12:10:28.361708 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-httpd" containerID="cri-o://2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898" gracePeriod=30 Jan 04 12:10:28 crc kubenswrapper[4797]: I0104 12:10:28.490547 4797 generic.go:334] "Generic (PLEG): container finished" podID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerID="4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989" exitCode=143 Jan 04 12:10:28 crc kubenswrapper[4797]: I0104 12:10:28.490654 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerDied","Data":"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989"} Jan 04 12:10:29 crc kubenswrapper[4797]: I0104 12:10:29.710207 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:29 crc kubenswrapper[4797]: I0104 12:10:29.710896 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-httpd" containerID="cri-o://cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46" gracePeriod=30 Jan 04 12:10:29 crc kubenswrapper[4797]: I0104 12:10:29.711078 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-log" containerID="cri-o://6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15" gracePeriod=30 Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.392691 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.517614 4797 generic.go:334] "Generic (PLEG): container finished" podID="18040641-e171-485a-9162-977e909fd416" containerID="6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15" exitCode=143 Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.517681 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerDied","Data":"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15"} Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.521269 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8aea92db-fe3e-4665-a95b-ed9450c01ca2","Type":"ContainerDied","Data":"884a142602865844ae6b336635ee6f7a078933be1361dfdcc3ba9d1ae46c2e55"} Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.521310 4797 scope.go:117] "RemoveContainer" containerID="2028d7b3ea55ca0534841bac5af40ec09a2dfef1f70c50fd1ee0483825907bef" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.521437 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530123 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530174 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjl49\" (UniqueName: \"kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530194 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530269 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530355 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530382 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530412 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.530471 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd\") pod \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\" (UID: \"8aea92db-fe3e-4665-a95b-ed9450c01ca2\") " Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.531448 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.531873 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.536328 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49" (OuterVolumeSpecName: "kube-api-access-gjl49") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "kube-api-access-gjl49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.536348 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts" (OuterVolumeSpecName: "scripts") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.539203 4797 scope.go:117] "RemoveContainer" containerID="0e2ddbf9a7bd44afbac17c6df1376e5e608d846f49e24aaa51b0dfc68b099125" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.557741 4797 scope.go:117] "RemoveContainer" containerID="df6267d8ab3008e7dca79d20574da67d3fbdb36b8588822d88d4d16ad8981b1f" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.561336 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.584783 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.586766 4797 scope.go:117] "RemoveContainer" containerID="4289ee5b6caba0fa5c84f85fa37399617b6ceda96b0013fd7c48e5c5242022b4" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.621923 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.627345 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data" (OuterVolumeSpecName: "config-data") pod "8aea92db-fe3e-4665-a95b-ed9450c01ca2" (UID: "8aea92db-fe3e-4665-a95b-ed9450c01ca2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633293 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633334 4797 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633346 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633356 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633363 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8aea92db-fe3e-4665-a95b-ed9450c01ca2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633372 4797 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633382 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjl49\" (UniqueName: \"kubernetes.io/projected/8aea92db-fe3e-4665-a95b-ed9450c01ca2-kube-api-access-gjl49\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:30 crc kubenswrapper[4797]: I0104 12:10:30.633394 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aea92db-fe3e-4665-a95b-ed9450c01ca2-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.538218 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" 
event={"ID":"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb","Type":"ContainerStarted","Data":"a7c056981801443d31bb93252636ab9e089edd5eeabc8686d37b7b291f4eac9c"} Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.826377 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.847486 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.860815 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:31 crc kubenswrapper[4797]: E0104 12:10:31.861265 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="sg-core" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861291 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="sg-core" Jan 04 12:10:31 crc kubenswrapper[4797]: E0104 12:10:31.861308 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-notification-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861316 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-notification-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: E0104 12:10:31.861327 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="proxy-httpd" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861333 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="proxy-httpd" Jan 04 12:10:31 crc kubenswrapper[4797]: E0104 12:10:31.861370 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-central-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861379 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-central-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861571 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-notification-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861600 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="sg-core" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861616 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="ceilometer-central-agent" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.861637 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" containerName="proxy-httpd" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.863630 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.865288 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.865979 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.866237 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.866560 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" podStartSLOduration=3.184993413 podStartE2EDuration="10.866543819s" podCreationTimestamp="2026-01-04 12:10:21 +0000 UTC" firstStartedPulling="2026-01-04 12:10:22.70175891 +0000 UTC m=+1321.558945609" lastFinishedPulling="2026-01-04 12:10:30.383309306 +0000 UTC m=+1329.240496015" observedRunningTime="2026-01-04 12:10:31.839794975 +0000 UTC m=+1330.696981724" watchObservedRunningTime="2026-01-04 12:10:31.866543819 +0000 UTC m=+1330.723730528" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.879901 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958074 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958138 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958177 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958425 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958464 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkxv6\" (UniqueName: \"kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958548 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " 
pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958590 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:31 crc kubenswrapper[4797]: I0104 12:10:31.958614 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060068 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060104 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkxv6\" (UniqueName: \"kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060141 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060168 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060185 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060332 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060376 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.060398 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.061140 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.061224 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.069638 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.070104 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.073609 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.073650 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.073683 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.076859 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkxv6\" (UniqueName: \"kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6\") pod \"ceilometer-0\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.264125 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.298411 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.365786 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.365823 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.365898 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.365998 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6lmt\" (UniqueName: \"kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.366016 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.366064 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.366080 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.366103 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data\") pod \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\" (UID: \"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9\") " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.367756 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs" (OuterVolumeSpecName: "logs") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.368145 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.373437 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts" (OuterVolumeSpecName: "scripts") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.376384 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.377851 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt" (OuterVolumeSpecName: "kube-api-access-h6lmt") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "kube-api-access-h6lmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.416303 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.426385 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.431161 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data" (OuterVolumeSpecName: "config-data") pod "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" (UID: "81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468883 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6lmt\" (UniqueName: \"kubernetes.io/projected/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-kube-api-access-h6lmt\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468918 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468955 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468965 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468974 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.468996 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.469004 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.469012 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.500179 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.555361 4797 generic.go:334] "Generic (PLEG): container finished" podID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerID="2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898" exitCode=0 Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.555411 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.555453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerDied","Data":"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898"} Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.555482 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9","Type":"ContainerDied","Data":"725ee05498b3384ec6889ea6afdfbcabcb8c4c893d3874fd7cb34a0be4152a5b"} Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.555500 4797 scope.go:117] "RemoveContainer" containerID="2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.570208 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.576341 4797 scope.go:117] "RemoveContainer" containerID="4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.591613 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.600079 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.614750 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:32 crc kubenswrapper[4797]: E0104 12:10:32.615135 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-log" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.615155 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-log" Jan 04 12:10:32 crc kubenswrapper[4797]: E0104 12:10:32.615187 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-httpd" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.615194 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-httpd" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.615356 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-httpd" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.615376 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" containerName="glance-log" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.617400 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.619820 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.620132 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.626075 4797 scope.go:117] "RemoveContainer" containerID="2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898" Jan 04 12:10:32 crc kubenswrapper[4797]: E0104 12:10:32.626443 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898\": container with ID starting with 2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898 not found: ID does not exist" containerID="2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.626470 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898"} err="failed to get container status \"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898\": rpc error: code = NotFound desc = could not find container \"2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898\": container with ID starting with 2aeda0efbdb93474362fdaff42602df918ebc94c5cda8feb0b42f24ec41cd898 not found: ID does not exist" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.626490 4797 scope.go:117] "RemoveContainer" containerID="4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989" Jan 04 12:10:32 crc kubenswrapper[4797]: E0104 12:10:32.627228 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989\": container with ID starting with 4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989 not found: ID does not exist" containerID="4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.627274 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989"} err="failed to get container status \"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989\": rpc error: code = NotFound desc = could not find container \"4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989\": container with ID starting with 4ed95c43b8ef142ef0364e294e821526ba61e3609e92315223dca5e47d237989 not found: ID does not exist" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.630962 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.770508 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.773789 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.773855 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82kzz\" (UniqueName: \"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.773912 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.774057 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.774110 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.774165 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.776338 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.776421 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877688 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877761 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82kzz\" (UniqueName: 
\"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877815 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877876 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877908 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877942 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.877981 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.878028 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.878296 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.879224 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.880319 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.884001 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.884955 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.889404 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.889674 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.897687 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82kzz\" (UniqueName: \"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.904403 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:32 crc kubenswrapper[4797]: I0104 12:10:32.939334 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.418409 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.483467 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9" path="/var/lib/kubelet/pods/81b3ed01-3a7e-4fdb-8ad3-28a82f0e48c9/volumes" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.484168 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aea92db-fe3e-4665-a95b-ed9450c01ca2" path="/var/lib/kubelet/pods/8aea92db-fe3e-4665-a95b-ed9450c01ca2/volumes" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491566 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491612 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtqrg\" (UniqueName: \"kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491653 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491668 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491698 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491731 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491813 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.491845 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle\") pod \"18040641-e171-485a-9162-977e909fd416\" (UID: \"18040641-e171-485a-9162-977e909fd416\") " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.501233 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs" 
(OuterVolumeSpecName: "logs") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.502041 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.506188 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts" (OuterVolumeSpecName: "scripts") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.506309 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.515473 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg" (OuterVolumeSpecName: "kube-api-access-rtqrg") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "kube-api-access-rtqrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.532097 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.544806 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.567299 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerStarted","Data":"da218aa1ec4703d3a0dcbfaf72c093b6c819dddbd7fd4bce6497b17b42f0d5b3"} Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.571282 4797 generic.go:334] "Generic (PLEG): container finished" podID="18040641-e171-485a-9162-977e909fd416" containerID="cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46" exitCode=0 Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.571356 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerDied","Data":"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46"} Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.571365 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.571390 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"18040641-e171-485a-9162-977e909fd416","Type":"ContainerDied","Data":"5c7f41dd8f931cafa043b2d1e43238451337b0d677e6f1371f4a14bba2e547b8"} Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.571410 4797 scope.go:117] "RemoveContainer" containerID="cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.574958 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerStarted","Data":"92fad1304fac4c970c9bdf7a5b6e47169e209fc38555102ec90fa0f047a27435"} Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.577836 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data" (OuterVolumeSpecName: "config-data") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593829 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593868 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtqrg\" (UniqueName: \"kubernetes.io/projected/18040641-e171-485a-9162-977e909fd416-kube-api-access-rtqrg\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593883 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593898 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18040641-e171-485a-9162-977e909fd416-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593923 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593935 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.593949 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.595384 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "18040641-e171-485a-9162-977e909fd416" (UID: "18040641-e171-485a-9162-977e909fd416"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.596997 4797 scope.go:117] "RemoveContainer" containerID="6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.601260 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ef8e3a41-8586-4918-b62a-0e1b49eab563" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.150:3000/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.625619 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.630145 4797 scope.go:117] "RemoveContainer" containerID="cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46" Jan 04 12:10:33 crc kubenswrapper[4797]: E0104 12:10:33.630887 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46\": container with ID starting with cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46 not found: ID does not exist" containerID="cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.630948 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46"} err="failed to get container status \"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46\": rpc error: code = NotFound desc = could not find container \"cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46\": container with ID starting with cf176b5ade4cf429f4fa77afe665077bc9c3c29d2e87d4c39a03b1e73e8c1c46 not found: ID does not exist" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.630979 4797 scope.go:117] "RemoveContainer" containerID="6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15" Jan 04 12:10:33 crc kubenswrapper[4797]: E0104 12:10:33.631357 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15\": container with ID starting with 6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15 not found: ID does not exist" containerID="6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.631378 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15"} err="failed to get container status \"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15\": rpc error: code = NotFound desc = could not find container \"6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15\": container with ID starting with 6fce0f11f7c1b4910bb51914d338defebbc460edffc0518810e00de4badf2f15 not found: ID does not exist" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.695526 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18040641-e171-485a-9162-977e909fd416-internal-tls-certs\") on node 
\"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.695562 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.906491 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.914046 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.942652 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:33 crc kubenswrapper[4797]: E0104 12:10:33.943075 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-log" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.943090 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-log" Jan 04 12:10:33 crc kubenswrapper[4797]: E0104 12:10:33.943106 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-httpd" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.943130 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-httpd" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.943301 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-httpd" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.943322 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="18040641-e171-485a-9162-977e909fd416" containerName="glance-log" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.944164 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.947292 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.947522 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 04 12:10:33 crc kubenswrapper[4797]: I0104 12:10:33.979408 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.000977 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk28t\" (UniqueName: \"kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001056 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001173 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001254 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001291 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001404 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001592 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.001627 4797 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103201 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103554 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103589 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103605 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103639 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103705 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103727 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.103793 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk28t\" (UniqueName: \"kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.104496 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.104550 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.104835 4797 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.109961 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.110859 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.111133 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.112565 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.125185 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk28t\" (UniqueName: \"kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.147034 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.303632 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.586886 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerStarted","Data":"b938370d51f4a430f6412957c6f857f3fdff9becd007e40941b645df2780bb31"} Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.587321 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerStarted","Data":"15bea94fea63ace192dfc26d8e02b1edc4fe12a5c7f970d0889db6fa26d86edf"} Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.590478 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerStarted","Data":"180cd379c89ba5c47f18c6ef1c23999a1a2f27dccb309fa51f167df314999171"} Jan 04 12:10:34 crc kubenswrapper[4797]: I0104 12:10:34.848535 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:35 crc kubenswrapper[4797]: I0104 12:10:35.485588 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18040641-e171-485a-9162-977e909fd416" path="/var/lib/kubelet/pods/18040641-e171-485a-9162-977e909fd416/volumes" Jan 04 12:10:35 crc kubenswrapper[4797]: I0104 12:10:35.598790 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerStarted","Data":"860c56f0ef4f536ba402454ce8600892b210ad29396896e9e1c61e0dcc8a1dc3"} Jan 04 12:10:36 crc kubenswrapper[4797]: I0104 12:10:36.615165 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerStarted","Data":"dc4a21b1db6be2e07ca76bdb6c4c04513e5342187c0ece820874f1a86da403bc"} Jan 04 12:10:36 crc kubenswrapper[4797]: I0104 12:10:36.618298 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerStarted","Data":"12a99c3f2374bcf465e4dea1ca963d10cfbd969365c68cb800d0058e6b3033aa"} Jan 04 12:10:36 crc kubenswrapper[4797]: I0104 12:10:36.656032 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.656013671 podStartE2EDuration="4.656013671s" podCreationTimestamp="2026-01-04 12:10:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:36.64346686 +0000 UTC m=+1335.500653569" watchObservedRunningTime="2026-01-04 12:10:36.656013671 +0000 UTC m=+1335.513200380" Jan 04 12:10:37 crc kubenswrapper[4797]: I0104 12:10:37.630955 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerStarted","Data":"195feab2fc5a30a29388929e5db7f342ff1fd19485ed5d54f1b0daa3a22494a5"} Jan 04 12:10:37 crc kubenswrapper[4797]: I0104 12:10:37.636858 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerStarted","Data":"22752eb1ddb051967b6c0036a2cf45902cbf1100e84fcd74f027afd664671caf"} Jan 04 12:10:37 crc 
kubenswrapper[4797]: I0104 12:10:37.670410 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.670384439 podStartE2EDuration="4.670384439s" podCreationTimestamp="2026-01-04 12:10:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:37.666327262 +0000 UTC m=+1336.523514051" watchObservedRunningTime="2026-01-04 12:10:37.670384439 +0000 UTC m=+1336.527571148" Jan 04 12:10:38 crc kubenswrapper[4797]: I0104 12:10:38.661975 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerStarted","Data":"d5bfda83e1144c987cb1ddfd72ecdfdce7d03221023306091c8029dc7bea793f"} Jan 04 12:10:38 crc kubenswrapper[4797]: I0104 12:10:38.662294 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:10:38 crc kubenswrapper[4797]: I0104 12:10:38.694585 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.390571278 podStartE2EDuration="7.694555886s" podCreationTimestamp="2026-01-04 12:10:31 +0000 UTC" firstStartedPulling="2026-01-04 12:10:32.764224853 +0000 UTC m=+1331.621411562" lastFinishedPulling="2026-01-04 12:10:38.068209461 +0000 UTC m=+1336.925396170" observedRunningTime="2026-01-04 12:10:38.683919835 +0000 UTC m=+1337.541106624" watchObservedRunningTime="2026-01-04 12:10:38.694555886 +0000 UTC m=+1337.551742625" Jan 04 12:10:41 crc kubenswrapper[4797]: I0104 12:10:41.688348 4797 generic.go:334] "Generic (PLEG): container finished" podID="ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" containerID="a7c056981801443d31bb93252636ab9e089edd5eeabc8686d37b7b291f4eac9c" exitCode=0 Jan 04 12:10:41 crc kubenswrapper[4797]: I0104 12:10:41.688445 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" event={"ID":"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb","Type":"ContainerDied","Data":"a7c056981801443d31bb93252636ab9e089edd5eeabc8686d37b7b291f4eac9c"} Jan 04 12:10:42 crc kubenswrapper[4797]: I0104 12:10:42.941203 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:10:42 crc kubenswrapper[4797]: I0104 12:10:42.942637 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:10:42 crc kubenswrapper[4797]: I0104 12:10:42.994374 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.005503 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.102763 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.291501 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data\") pod \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.291729 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts\") pod \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.292710 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6xww\" (UniqueName: \"kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww\") pod \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.292960 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle\") pod \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\" (UID: \"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb\") " Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.298542 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww" (OuterVolumeSpecName: "kube-api-access-t6xww") pod "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" (UID: "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb"). InnerVolumeSpecName "kube-api-access-t6xww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.303262 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts" (OuterVolumeSpecName: "scripts") pod "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" (UID: "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.320356 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data" (OuterVolumeSpecName: "config-data") pod "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" (UID: "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.330821 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" (UID: "ebe6fbd9-7112-4c2c-a088-2da12bce0ddb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.397219 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.397694 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6xww\" (UniqueName: \"kubernetes.io/projected/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-kube-api-access-t6xww\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.397720 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.397737 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.713618 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.713604 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wjgc4" event={"ID":"ebe6fbd9-7112-4c2c-a088-2da12bce0ddb","Type":"ContainerDied","Data":"a6db5f1a814f783eaebb0c5a6a72c6ba5abc35ad9671db5150195f982fd22c45"} Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.714967 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6db5f1a814f783eaebb0c5a6a72c6ba5abc35ad9671db5150195f982fd22c45" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.715044 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.715094 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.867085 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:10:43 crc kubenswrapper[4797]: E0104 12:10:43.867497 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" containerName="nova-cell0-conductor-db-sync" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.867517 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" containerName="nova-cell0-conductor-db-sync" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.867750 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" containerName="nova-cell0-conductor-db-sync" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.868423 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.871135 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-h575v" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.871381 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 04 12:10:43 crc kubenswrapper[4797]: I0104 12:10:43.888149 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.007845 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vklgx\" (UniqueName: \"kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.007917 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.008142 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.110059 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.110220 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vklgx\" (UniqueName: \"kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.110262 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.114424 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.114463 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.128946 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vklgx\" (UniqueName: \"kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx\") pod \"nova-cell0-conductor-0\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") " pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.185559 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.304364 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.304409 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.345156 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.345594 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.680322 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.726164 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"db00660d-8e07-4dd6-80ec-9d85f9902af4","Type":"ContainerStarted","Data":"7eb6a624be0a174329903f0d6596c15793a68184c45c4572451cea2e7b47e768"} Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.726868 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:44 crc kubenswrapper[4797]: I0104 12:10:44.726911 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.736642 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"db00660d-8e07-4dd6-80ec-9d85f9902af4","Type":"ContainerStarted","Data":"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"} Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.736670 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.736961 4797 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.737292 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.753205 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.753174448 podStartE2EDuration="2.753174448s" podCreationTimestamp="2026-01-04 12:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:45.752630794 +0000 UTC m=+1344.609817553" watchObservedRunningTime="2026-01-04 12:10:45.753174448 +0000 UTC m=+1344.610361197" Jan 04 12:10:45 
crc kubenswrapper[4797]: I0104 12:10:45.860438 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:45 crc kubenswrapper[4797]: I0104 12:10:45.861700 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:46 crc kubenswrapper[4797]: I0104 12:10:46.532785 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:46 crc kubenswrapper[4797]: I0104 12:10:46.589668 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.228269 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.492660 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.492719 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.840577 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-zr4cn"] Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.849087 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.859328 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.860438 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.865684 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.865736 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.865769 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kncv\" (UniqueName: \"kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.860202 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.906486 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zr4cn"] Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.969479 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.969571 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.969591 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.969630 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kncv\" (UniqueName: 
\"kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.976547 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.977732 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.998687 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kncv\" (UniqueName: \"kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:49 crc kubenswrapper[4797]: I0104 12:10:49.999624 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zr4cn\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.045902 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.048339 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.060298 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.065459 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.072934 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.073025 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.073109 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hhmc\" (UniqueName: \"kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.138514 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.140089 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.142590 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.151037 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174578 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hhmc\" (UniqueName: \"kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174661 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174686 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174717 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174751 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlxfg\" (UniqueName: \"kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174785 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.174831 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.194805 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.200720 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.202165 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.217095 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.218224 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hhmc\" (UniqueName: \"kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc\") pod \"nova-scheduler-0\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.219044 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.222508 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.241009 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276441 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlxfg\" (UniqueName: \"kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276512 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276558 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jzzc\" (UniqueName: \"kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276593 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276810 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs\") 
pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276862 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276909 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.276937 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.277380 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.282074 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.283465 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.287641 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.287939 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.312720 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.318307 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlxfg\" (UniqueName: \"kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg\") pod \"nova-api-0\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.338585 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.374031 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.375743 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380541 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbzh5\" (UniqueName: \"kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380644 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380749 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380809 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380835 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jzzc\" (UniqueName: \"kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380884 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.380927 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.384777 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.385696 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.386074 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.408303 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.414621 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.415191 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jzzc\" (UniqueName: \"kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc\") pod \"nova-metadata-0\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.482877 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.482930 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483027 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483065 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483082 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483099 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrr5c\" (UniqueName: \"kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483130 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbzh5\" (UniqueName: 
\"kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483168 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.483219 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.488393 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.489649 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.504302 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbzh5\" (UniqueName: \"kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.587712 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.587852 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.587870 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.587889 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrr5c\" (UniqueName: 
\"kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.587943 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.588013 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.588744 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.589240 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.589713 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.590199 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.590913 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.612239 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrr5c\" (UniqueName: \"kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c\") pod \"dnsmasq-dns-5bfb54f9b5-dd9vt\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.618841 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.654427 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.670392 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.716036 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.763606 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zr4cn"] Jan 04 12:10:50 crc kubenswrapper[4797]: W0104 12:10:50.807156 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaee8bb47_3406_4f2c_8159_e6b9031ef090.slice/crio-88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1 WatchSource:0}: Error finding container 88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1: Status 404 returned error can't find the container with id 88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1 Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.901494 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.977394 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wfvb9"] Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.978603 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.982104 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 04 12:10:50 crc kubenswrapper[4797]: I0104 12:10:50.982325 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 04 12:10:51 crc kubenswrapper[4797]: W0104 12:10:50.999589 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcf028cf_1e5a_4c31_85eb_1ad9e2d6b7da.slice/crio-310381ec03d3b8d68b01f9490f641761dd0c870f455d04dfdb33a71282d89a46 WatchSource:0}: Error finding container 310381ec03d3b8d68b01f9490f641761dd0c870f455d04dfdb33a71282d89a46: Status 404 returned error can't find the container with id 310381ec03d3b8d68b01f9490f641761dd0c870f455d04dfdb33a71282d89a46 Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.003327 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.003389 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7l5l\" (UniqueName: \"kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.003409 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.003478 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.018469 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wfvb9"] Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.104881 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7l5l\" (UniqueName: \"kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.104924 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.105003 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.105063 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.112718 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.112881 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.114855 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " 
pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.122798 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7l5l\" (UniqueName: \"kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l\") pod \"nova-cell1-conductor-db-sync-wfvb9\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.153540 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.294015 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:10:51 crc kubenswrapper[4797]: W0104 12:10:51.298328 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5d4aa7d_4d04_4811_9220_0100fb7cf98e.slice/crio-6944ad56e11adadc405242b913cb6ee0a1314799741bff2528d907355b840037 WatchSource:0}: Error finding container 6944ad56e11adadc405242b913cb6ee0a1314799741bff2528d907355b840037: Status 404 returned error can't find the container with id 6944ad56e11adadc405242b913cb6ee0a1314799741bff2528d907355b840037 Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.301605 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:10:51 crc kubenswrapper[4797]: W0104 12:10:51.308675 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod977fcbf9_a952_4536_92ed_c4bc6dd86887.slice/crio-9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743 WatchSource:0}: Error finding container 9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743: Status 404 returned error can't find the container with id 9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743 Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.417478 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.446330 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.802201 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerStarted","Data":"f0839695525e98f6072ecd7a27657ddc665d63c17e9f62008ca8dc02ca59b591"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.803810 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerStarted","Data":"409d5e1ee887625f566e6cce9f545b9514b334fa6e3a38fc340c0473087cd4c6"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.805425 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da","Type":"ContainerStarted","Data":"310381ec03d3b8d68b01f9490f641761dd0c870f455d04dfdb33a71282d89a46"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.806908 4797 generic.go:334] "Generic (PLEG): container finished" podID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerID="cb7bf4291a031834da82973f2a48f5bc74729925017712ac6f2b09e3b4134c5a" exitCode=0 Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.807014 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" event={"ID":"977fcbf9-a952-4536-92ed-c4bc6dd86887","Type":"ContainerDied","Data":"cb7bf4291a031834da82973f2a48f5bc74729925017712ac6f2b09e3b4134c5a"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.807045 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" event={"ID":"977fcbf9-a952-4536-92ed-c4bc6dd86887","Type":"ContainerStarted","Data":"9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.811279 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zr4cn" event={"ID":"aee8bb47-3406-4f2c-8159-e6b9031ef090","Type":"ContainerStarted","Data":"c5681af799eef3f20aaf548557caaad36b40bee661be1cbc39d9809e2d98ecb9"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.811328 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zr4cn" event={"ID":"aee8bb47-3406-4f2c-8159-e6b9031ef090","Type":"ContainerStarted","Data":"88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.812500 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5d4aa7d-4d04-4811-9220-0100fb7cf98e","Type":"ContainerStarted","Data":"6944ad56e11adadc405242b913cb6ee0a1314799741bff2528d907355b840037"} Jan 04 12:10:51 crc kubenswrapper[4797]: I0104 12:10:51.860615 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-zr4cn" podStartSLOduration=2.860597287 podStartE2EDuration="2.860597287s" podCreationTimestamp="2026-01-04 12:10:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:51.844577045 +0000 UTC m=+1350.701763754" watchObservedRunningTime="2026-01-04 12:10:51.860597287 +0000 UTC m=+1350.717783996" Jan 04 12:10:51 crc 
kubenswrapper[4797]: I0104 12:10:51.901648 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wfvb9"] Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.822275 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" event={"ID":"88052d87-10b0-4814-9cc7-62e7040b415a","Type":"ContainerStarted","Data":"e9781d8b11b2059d23370987bd4fe4ba9a84feaaa9d8282c33746602aff56cd0"} Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.822677 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" event={"ID":"88052d87-10b0-4814-9cc7-62e7040b415a","Type":"ContainerStarted","Data":"dbacc940b0f2ddf5e2c04e4d0229cd4a6fdd980f142b065da978bae8020159b3"} Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.828254 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" event={"ID":"977fcbf9-a952-4536-92ed-c4bc6dd86887","Type":"ContainerStarted","Data":"5a8f25d330d8d93b94b4f89f5ab02f3d9ff1f469e845e6f615fa4dfc0aa9ad40"} Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.828296 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.837504 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" podStartSLOduration=2.837486368 podStartE2EDuration="2.837486368s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:52.834031797 +0000 UTC m=+1351.691218506" watchObservedRunningTime="2026-01-04 12:10:52.837486368 +0000 UTC m=+1351.694673077" Jan 04 12:10:52 crc kubenswrapper[4797]: I0104 12:10:52.856799 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" podStartSLOduration=2.856780697 podStartE2EDuration="2.856780697s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:52.851539118 +0000 UTC m=+1351.708725827" watchObservedRunningTime="2026-01-04 12:10:52.856780697 +0000 UTC m=+1351.713967406" Jan 04 12:10:53 crc kubenswrapper[4797]: I0104 12:10:53.697904 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:10:53 crc kubenswrapper[4797]: I0104 12:10:53.734114 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.855037 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerStarted","Data":"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c"} Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.857203 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da","Type":"ContainerStarted","Data":"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2"} Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.859228 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"e5d4aa7d-4d04-4811-9220-0100fb7cf98e","Type":"ContainerStarted","Data":"5eb75630dfbca09b68903f956d04849bc104e20fc6589774c3712547886605a9"} Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.859327 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5eb75630dfbca09b68903f956d04849bc104e20fc6589774c3712547886605a9" gracePeriod=30 Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.861539 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerStarted","Data":"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"} Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.879880 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.522007045 podStartE2EDuration="5.879866404s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="2026-01-04 12:10:51.001890081 +0000 UTC m=+1349.859076790" lastFinishedPulling="2026-01-04 12:10:55.35974944 +0000 UTC m=+1354.216936149" observedRunningTime="2026-01-04 12:10:55.878933859 +0000 UTC m=+1354.736120588" watchObservedRunningTime="2026-01-04 12:10:55.879866404 +0000 UTC m=+1354.737053113" Jan 04 12:10:55 crc kubenswrapper[4797]: I0104 12:10:55.904668 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.846444735 podStartE2EDuration="5.904644527s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="2026-01-04 12:10:51.301458235 +0000 UTC m=+1350.158644934" lastFinishedPulling="2026-01-04 12:10:55.359658017 +0000 UTC m=+1354.216844726" observedRunningTime="2026-01-04 12:10:55.894935241 +0000 UTC m=+1354.752121970" watchObservedRunningTime="2026-01-04 12:10:55.904644527 +0000 UTC m=+1354.761831236" Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.874478 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerStarted","Data":"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4"} Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.874670 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-log" containerID="cri-o://86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" gracePeriod=30 Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.875236 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-metadata" containerID="cri-o://6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" gracePeriod=30 Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.885398 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerStarted","Data":"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4"} Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.939075 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" 
podStartSLOduration=3.015703854 podStartE2EDuration="6.939050713s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="2026-01-04 12:10:51.455418172 +0000 UTC m=+1350.312604881" lastFinishedPulling="2026-01-04 12:10:55.378764991 +0000 UTC m=+1354.235951740" observedRunningTime="2026-01-04 12:10:56.930399125 +0000 UTC m=+1355.787585854" watchObservedRunningTime="2026-01-04 12:10:56.939050713 +0000 UTC m=+1355.796237432" Jan 04 12:10:56 crc kubenswrapper[4797]: I0104 12:10:56.967128 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.762668346 podStartE2EDuration="6.967109133s" podCreationTimestamp="2026-01-04 12:10:50 +0000 UTC" firstStartedPulling="2026-01-04 12:10:51.170859313 +0000 UTC m=+1350.028046032" lastFinishedPulling="2026-01-04 12:10:55.37530011 +0000 UTC m=+1354.232486819" observedRunningTime="2026-01-04 12:10:56.957418137 +0000 UTC m=+1355.814604916" watchObservedRunningTime="2026-01-04 12:10:56.967109133 +0000 UTC m=+1355.824295852" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.507112 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.652182 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data\") pod \"ccef00dc-6d96-4198-9dac-add06c2b3692\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.652259 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle\") pod \"ccef00dc-6d96-4198-9dac-add06c2b3692\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.652319 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jzzc\" (UniqueName: \"kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc\") pod \"ccef00dc-6d96-4198-9dac-add06c2b3692\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.652446 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs\") pod \"ccef00dc-6d96-4198-9dac-add06c2b3692\" (UID: \"ccef00dc-6d96-4198-9dac-add06c2b3692\") " Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.653144 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs" (OuterVolumeSpecName: "logs") pod "ccef00dc-6d96-4198-9dac-add06c2b3692" (UID: "ccef00dc-6d96-4198-9dac-add06c2b3692"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.658019 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc" (OuterVolumeSpecName: "kube-api-access-6jzzc") pod "ccef00dc-6d96-4198-9dac-add06c2b3692" (UID: "ccef00dc-6d96-4198-9dac-add06c2b3692"). InnerVolumeSpecName "kube-api-access-6jzzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.699451 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data" (OuterVolumeSpecName: "config-data") pod "ccef00dc-6d96-4198-9dac-add06c2b3692" (UID: "ccef00dc-6d96-4198-9dac-add06c2b3692"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.702811 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccef00dc-6d96-4198-9dac-add06c2b3692" (UID: "ccef00dc-6d96-4198-9dac-add06c2b3692"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.755660 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jzzc\" (UniqueName: \"kubernetes.io/projected/ccef00dc-6d96-4198-9dac-add06c2b3692-kube-api-access-6jzzc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.755707 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccef00dc-6d96-4198-9dac-add06c2b3692-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.755726 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.755743 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccef00dc-6d96-4198-9dac-add06c2b3692-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.898713 4797 generic.go:334] "Generic (PLEG): container finished" podID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerID="6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" exitCode=0 Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.898763 4797 generic.go:334] "Generic (PLEG): container finished" podID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerID="86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" exitCode=143 Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.900295 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.900450 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerDied","Data":"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4"} Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.900502 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerDied","Data":"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c"} Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.900523 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ccef00dc-6d96-4198-9dac-add06c2b3692","Type":"ContainerDied","Data":"409d5e1ee887625f566e6cce9f545b9514b334fa6e3a38fc340c0473087cd4c6"} Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.900549 4797 scope.go:117] "RemoveContainer" containerID="6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.950560 4797 scope.go:117] "RemoveContainer" containerID="86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.969778 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.991724 4797 scope.go:117] "RemoveContainer" containerID="6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" Jan 04 12:10:57 crc kubenswrapper[4797]: E0104 12:10:57.992379 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4\": container with ID starting with 6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4 not found: ID does not exist" containerID="6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992427 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4"} err="failed to get container status \"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4\": rpc error: code = NotFound desc = could not find container \"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4\": container with ID starting with 6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4 not found: ID does not exist" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992458 4797 scope.go:117] "RemoveContainer" containerID="86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" Jan 04 12:10:57 crc kubenswrapper[4797]: E0104 12:10:57.992689 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c\": container with ID starting with 86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c not found: ID does not exist" containerID="86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992720 4797 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c"} err="failed to get container status \"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c\": rpc error: code = NotFound desc = could not find container \"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c\": container with ID starting with 86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c not found: ID does not exist" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992744 4797 scope.go:117] "RemoveContainer" containerID="6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992939 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4"} err="failed to get container status \"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4\": rpc error: code = NotFound desc = could not find container \"6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4\": container with ID starting with 6bc0939d902c563da6ca04642b6c4b92216ffddf1196978e651859df250848c4 not found: ID does not exist" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.992962 4797 scope.go:117] "RemoveContainer" containerID="86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.993264 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c"} err="failed to get container status \"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c\": rpc error: code = NotFound desc = could not find container \"86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c\": container with ID starting with 86214d8a535ce1097cc7897984dd695a782342b699b937bb7951934760a9f70c not found: ID does not exist" Jan 04 12:10:57 crc kubenswrapper[4797]: I0104 12:10:57.998380 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.008674 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:58 crc kubenswrapper[4797]: E0104 12:10:58.009170 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-log" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.009191 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-log" Jan 04 12:10:58 crc kubenswrapper[4797]: E0104 12:10:58.009214 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-metadata" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.009223 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-metadata" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.009476 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-log" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.009496 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" containerName="nova-metadata-metadata" Jan 04 12:10:58 crc 
kubenswrapper[4797]: I0104 12:10:58.010607 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.015417 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.015450 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.024467 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.165134 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zgwz\" (UniqueName: \"kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.165189 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.165261 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.165320 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.165626 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266335 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266425 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266519 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data\") pod 
\"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266539 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zgwz\" (UniqueName: \"kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266562 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.266913 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.282019 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.282402 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.286940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zgwz\" (UniqueName: \"kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.287051 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.338768 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.790016 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:10:58 crc kubenswrapper[4797]: W0104 12:10:58.796312 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfda9abb5_f11e_49f3_8536_ae098f9f1dfe.slice/crio-311e1ec2a1596287d5479178385dd0f324cf9513bcdf164f3fd39f859db8fba0 WatchSource:0}: Error finding container 311e1ec2a1596287d5479178385dd0f324cf9513bcdf164f3fd39f859db8fba0: Status 404 returned error can't find the container with id 311e1ec2a1596287d5479178385dd0f324cf9513bcdf164f3fd39f859db8fba0 Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.911158 4797 generic.go:334] "Generic (PLEG): container finished" podID="aee8bb47-3406-4f2c-8159-e6b9031ef090" containerID="c5681af799eef3f20aaf548557caaad36b40bee661be1cbc39d9809e2d98ecb9" exitCode=0 Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.911234 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zr4cn" event={"ID":"aee8bb47-3406-4f2c-8159-e6b9031ef090","Type":"ContainerDied","Data":"c5681af799eef3f20aaf548557caaad36b40bee661be1cbc39d9809e2d98ecb9"} Jan 04 12:10:58 crc kubenswrapper[4797]: I0104 12:10:58.915608 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerStarted","Data":"311e1ec2a1596287d5479178385dd0f324cf9513bcdf164f3fd39f859db8fba0"} Jan 04 12:10:59 crc kubenswrapper[4797]: I0104 12:10:59.507134 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccef00dc-6d96-4198-9dac-add06c2b3692" path="/var/lib/kubelet/pods/ccef00dc-6d96-4198-9dac-add06c2b3692/volumes" Jan 04 12:10:59 crc kubenswrapper[4797]: I0104 12:10:59.930240 4797 generic.go:334] "Generic (PLEG): container finished" podID="88052d87-10b0-4814-9cc7-62e7040b415a" containerID="e9781d8b11b2059d23370987bd4fe4ba9a84feaaa9d8282c33746602aff56cd0" exitCode=0 Jan 04 12:10:59 crc kubenswrapper[4797]: I0104 12:10:59.930344 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" event={"ID":"88052d87-10b0-4814-9cc7-62e7040b415a","Type":"ContainerDied","Data":"e9781d8b11b2059d23370987bd4fe4ba9a84feaaa9d8282c33746602aff56cd0"} Jan 04 12:10:59 crc kubenswrapper[4797]: I0104 12:10:59.933434 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerStarted","Data":"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc"} Jan 04 12:10:59 crc kubenswrapper[4797]: I0104 12:10:59.933486 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerStarted","Data":"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23"} Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.036812 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.036790258 podStartE2EDuration="3.036790258s" podCreationTimestamp="2026-01-04 12:10:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:00.001460427 +0000 UTC m=+1358.858647166" 
watchObservedRunningTime="2026-01-04 12:11:00.036790258 +0000 UTC m=+1358.893976977" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.376258 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.409460 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.409530 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.442106 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.509672 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data\") pod \"aee8bb47-3406-4f2c-8159-e6b9031ef090\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.510334 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kncv\" (UniqueName: \"kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv\") pod \"aee8bb47-3406-4f2c-8159-e6b9031ef090\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.510922 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts\") pod \"aee8bb47-3406-4f2c-8159-e6b9031ef090\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.511018 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle\") pod \"aee8bb47-3406-4f2c-8159-e6b9031ef090\" (UID: \"aee8bb47-3406-4f2c-8159-e6b9031ef090\") " Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.519169 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv" (OuterVolumeSpecName: "kube-api-access-8kncv") pod "aee8bb47-3406-4f2c-8159-e6b9031ef090" (UID: "aee8bb47-3406-4f2c-8159-e6b9031ef090"). InnerVolumeSpecName "kube-api-access-8kncv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.522157 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts" (OuterVolumeSpecName: "scripts") pod "aee8bb47-3406-4f2c-8159-e6b9031ef090" (UID: "aee8bb47-3406-4f2c-8159-e6b9031ef090"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.556195 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data" (OuterVolumeSpecName: "config-data") pod "aee8bb47-3406-4f2c-8159-e6b9031ef090" (UID: "aee8bb47-3406-4f2c-8159-e6b9031ef090"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.567547 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aee8bb47-3406-4f2c-8159-e6b9031ef090" (UID: "aee8bb47-3406-4f2c-8159-e6b9031ef090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.613430 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.613486 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.613510 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kncv\" (UniqueName: \"kubernetes.io/projected/aee8bb47-3406-4f2c-8159-e6b9031ef090-kube-api-access-8kncv\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.613533 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aee8bb47-3406-4f2c-8159-e6b9031ef090-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.620048 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.620117 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.671348 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.717916 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.820579 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"] Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.820829 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="dnsmasq-dns" containerID="cri-o://72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88" gracePeriod=10 Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.960635 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zr4cn" event={"ID":"aee8bb47-3406-4f2c-8159-e6b9031ef090","Type":"ContainerDied","Data":"88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1"} Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.960687 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88b194503fc1c28405bcb9246b4f278f93c225f97583325fd4422ffecff84fd1" Jan 04 12:11:00 crc kubenswrapper[4797]: I0104 12:11:00.960750 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zr4cn" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.002540 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.119851 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.120156 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-log" containerID="cri-o://19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0" gracePeriod=30 Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.120316 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-api" containerID="cri-o://0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4" gracePeriod=30 Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.131094 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": EOF" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.131266 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": EOF" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.152122 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.383682 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.441555 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.497441 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.532245 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts\") pod \"88052d87-10b0-4814-9cc7-62e7040b415a\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.532341 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle\") pod \"88052d87-10b0-4814-9cc7-62e7040b415a\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.532360 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7l5l\" (UniqueName: \"kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l\") pod \"88052d87-10b0-4814-9cc7-62e7040b415a\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.532388 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data\") pod \"88052d87-10b0-4814-9cc7-62e7040b415a\" (UID: \"88052d87-10b0-4814-9cc7-62e7040b415a\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.538331 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l" (OuterVolumeSpecName: "kube-api-access-r7l5l") pod "88052d87-10b0-4814-9cc7-62e7040b415a" (UID: "88052d87-10b0-4814-9cc7-62e7040b415a"). InnerVolumeSpecName "kube-api-access-r7l5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.538770 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts" (OuterVolumeSpecName: "scripts") pod "88052d87-10b0-4814-9cc7-62e7040b415a" (UID: "88052d87-10b0-4814-9cc7-62e7040b415a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.560476 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88052d87-10b0-4814-9cc7-62e7040b415a" (UID: "88052d87-10b0-4814-9cc7-62e7040b415a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.565304 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data" (OuterVolumeSpecName: "config-data") pod "88052d87-10b0-4814-9cc7-62e7040b415a" (UID: "88052d87-10b0-4814-9cc7-62e7040b415a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.634291 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.634550 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.634672 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.634803 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.634926 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.635108 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpnt2\" (UniqueName: \"kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2\") pod \"061e64ec-34bd-4c16-8afd-8b03537455ad\" (UID: \"061e64ec-34bd-4c16-8afd-8b03537455ad\") " Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.636065 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.636182 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.636245 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7l5l\" (UniqueName: \"kubernetes.io/projected/88052d87-10b0-4814-9cc7-62e7040b415a-kube-api-access-r7l5l\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.636302 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88052d87-10b0-4814-9cc7-62e7040b415a-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.639062 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2" (OuterVolumeSpecName: "kube-api-access-zpnt2") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" 
(UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "kube-api-access-zpnt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.677033 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" (UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.680716 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" (UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.681909 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" (UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.691635 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config" (OuterVolumeSpecName: "config") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" (UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.695917 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "061e64ec-34bd-4c16-8afd-8b03537455ad" (UID: "061e64ec-34bd-4c16-8afd-8b03537455ad"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738784 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738814 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738824 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738837 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738846 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/061e64ec-34bd-4c16-8afd-8b03537455ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.738854 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpnt2\" (UniqueName: \"kubernetes.io/projected/061e64ec-34bd-4c16-8afd-8b03537455ad-kube-api-access-zpnt2\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.969445 4797 generic.go:334] "Generic (PLEG): container finished" podID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerID="72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88" exitCode=0 Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.969499 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" event={"ID":"061e64ec-34bd-4c16-8afd-8b03537455ad","Type":"ContainerDied","Data":"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88"} Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.969524 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" event={"ID":"061e64ec-34bd-4c16-8afd-8b03537455ad","Type":"ContainerDied","Data":"e277dd5c980779b08f403cc97145ecf0ebfb0fc217bb1a7232a9a453e0ddfbab"} Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.969539 4797 scope.go:117] "RemoveContainer" containerID="72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.969638 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b4f5fc4f-wtcrw" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.977589 4797 generic.go:334] "Generic (PLEG): container finished" podID="04294055-465d-4e64-b41a-219eee1f50ad" containerID="19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0" exitCode=143 Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.977616 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerDied","Data":"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"} Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.979950 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" event={"ID":"88052d87-10b0-4814-9cc7-62e7040b415a","Type":"ContainerDied","Data":"dbacc940b0f2ddf5e2c04e4d0229cd4a6fdd980f142b065da978bae8020159b3"} Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.980164 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbacc940b0f2ddf5e2c04e4d0229cd4a6fdd980f142b065da978bae8020159b3" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.980176 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wfvb9" Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.980193 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-metadata" containerID="cri-o://c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" gracePeriod=30 Jan 04 12:11:01 crc kubenswrapper[4797]: I0104 12:11:01.980096 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-log" containerID="cri-o://b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" gracePeriod=30 Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.024051 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"] Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.032131 4797 scope.go:117] "RemoveContainer" containerID="2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.032392 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b4f5fc4f-wtcrw"] Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.094701 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:02 crc kubenswrapper[4797]: E0104 12:11:02.096470 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="init" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.096498 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="init" Jan 04 12:11:02 crc kubenswrapper[4797]: E0104 12:11:02.096519 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88052d87-10b0-4814-9cc7-62e7040b415a" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.096526 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="88052d87-10b0-4814-9cc7-62e7040b415a" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:02 crc 
kubenswrapper[4797]: E0104 12:11:02.096585 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="dnsmasq-dns" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.096592 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="dnsmasq-dns" Jan 04 12:11:02 crc kubenswrapper[4797]: E0104 12:11:02.096609 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee8bb47-3406-4f2c-8159-e6b9031ef090" containerName="nova-manage" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.096615 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee8bb47-3406-4f2c-8159-e6b9031ef090" containerName="nova-manage" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.097194 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="88052d87-10b0-4814-9cc7-62e7040b415a" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.097326 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" containerName="dnsmasq-dns" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.097479 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee8bb47-3406-4f2c-8159-e6b9031ef090" containerName="nova-manage" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.098660 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.106108 4797 scope.go:117] "RemoveContainer" containerID="72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.108190 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.116882 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:02 crc kubenswrapper[4797]: E0104 12:11:02.127289 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88\": container with ID starting with 72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88 not found: ID does not exist" containerID="72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.127449 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88"} err="failed to get container status \"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88\": rpc error: code = NotFound desc = could not find container \"72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88\": container with ID starting with 72ce53390079a354ab284a082191ba09266c9ef2cc63e0e6e353029dfc583c88 not found: ID does not exist" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.127506 4797 scope.go:117] "RemoveContainer" containerID="2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5" Jan 04 12:11:02 crc kubenswrapper[4797]: E0104 12:11:02.131202 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5\": container with ID starting with 2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5 not found: ID does not exist" containerID="2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.131309 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5"} err="failed to get container status \"2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5\": rpc error: code = NotFound desc = could not find container \"2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5\": container with ID starting with 2e07ea878633f9c5b4ca81f5a0995f87f31b3b02666714de594c7afc26676ea5 not found: ID does not exist" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.251578 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.251667 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.251808 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2g5w\" (UniqueName: \"kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.277069 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.353261 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.353347 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.353523 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2g5w\" (UniqueName: \"kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.358665 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.361075 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.386175 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2g5w\" (UniqueName: \"kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w\") pod \"nova-cell1-conductor-0\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.429269 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.493957 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.659475 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zgwz\" (UniqueName: \"kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz\") pod \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.659731 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs\") pod \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.659768 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs\") pod \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.659795 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data\") pod \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.659813 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle\") pod \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\" (UID: \"fda9abb5-f11e-49f3-8536-ae098f9f1dfe\") " Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.662315 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs" (OuterVolumeSpecName: "logs") pod "fda9abb5-f11e-49f3-8536-ae098f9f1dfe" (UID: "fda9abb5-f11e-49f3-8536-ae098f9f1dfe"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.663807 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz" (OuterVolumeSpecName: "kube-api-access-2zgwz") pod "fda9abb5-f11e-49f3-8536-ae098f9f1dfe" (UID: "fda9abb5-f11e-49f3-8536-ae098f9f1dfe"). InnerVolumeSpecName "kube-api-access-2zgwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.685208 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data" (OuterVolumeSpecName: "config-data") pod "fda9abb5-f11e-49f3-8536-ae098f9f1dfe" (UID: "fda9abb5-f11e-49f3-8536-ae098f9f1dfe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.691657 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fda9abb5-f11e-49f3-8536-ae098f9f1dfe" (UID: "fda9abb5-f11e-49f3-8536-ae098f9f1dfe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.708838 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "fda9abb5-f11e-49f3-8536-ae098f9f1dfe" (UID: "fda9abb5-f11e-49f3-8536-ae098f9f1dfe"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.761926 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.761958 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.761971 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zgwz\" (UniqueName: \"kubernetes.io/projected/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-kube-api-access-2zgwz\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.761980 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.762012 4797 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fda9abb5-f11e-49f3-8536-ae098f9f1dfe-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.880248 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.993829 4797 generic.go:334] "Generic (PLEG): container finished" podID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerID="c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" exitCode=0 Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.993870 4797 generic.go:334] "Generic (PLEG): container finished" podID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerID="b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" exitCode=143 Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.993889 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerDied","Data":"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc"} Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.993917 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.993973 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerDied","Data":"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23"} Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.994006 4797 scope.go:117] "RemoveContainer" containerID="c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.994046 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fda9abb5-f11e-49f3-8536-ae098f9f1dfe","Type":"ContainerDied","Data":"311e1ec2a1596287d5479178385dd0f324cf9513bcdf164f3fd39f859db8fba0"} Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.999540 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"970e7570-2ccd-4420-8e1f-70aff6cf2f38","Type":"ContainerStarted","Data":"c4ba556fc45b42f07a38f347b8726e2718bf48adc4ce15a78f26879fc7b36129"} Jan 04 12:11:02 crc kubenswrapper[4797]: I0104 12:11:02.999668 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerName="nova-scheduler-scheduler" containerID="cri-o://730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" gracePeriod=30 Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.026855 4797 scope.go:117] "RemoveContainer" containerID="b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.046842 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.059493 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070229 4797 scope.go:117] "RemoveContainer" containerID="c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070343 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:03 crc kubenswrapper[4797]: E0104 12:11:03.070739 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-log" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070752 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-log" Jan 04 12:11:03 crc kubenswrapper[4797]: E0104 12:11:03.070779 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-metadata" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070786 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-metadata" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070950 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-log" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.070977 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" containerName="nova-metadata-metadata" Jan 04 12:11:03 
crc kubenswrapper[4797]: I0104 12:11:03.072006 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: E0104 12:11:03.074456 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc\": container with ID starting with c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc not found: ID does not exist" containerID="c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.074511 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc"} err="failed to get container status \"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc\": rpc error: code = NotFound desc = could not find container \"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc\": container with ID starting with c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc not found: ID does not exist" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.074542 4797 scope.go:117] "RemoveContainer" containerID="b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" Jan 04 12:11:03 crc kubenswrapper[4797]: E0104 12:11:03.074908 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23\": container with ID starting with b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23 not found: ID does not exist" containerID="b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.074941 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23"} err="failed to get container status \"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23\": rpc error: code = NotFound desc = could not find container \"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23\": container with ID starting with b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23 not found: ID does not exist" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.074967 4797 scope.go:117] "RemoveContainer" containerID="c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.075362 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc"} err="failed to get container status \"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc\": rpc error: code = NotFound desc = could not find container \"c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc\": container with ID starting with c685a4206dfe752731eaa180858b1bdfb52f75106d0e24f1d4c0f14cd1c06fcc not found: ID does not exist" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.075418 4797 scope.go:117] "RemoveContainer" containerID="b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.075739 4797 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23"} err="failed to get container status \"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23\": rpc error: code = NotFound desc = could not find container \"b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23\": container with ID starting with b5eee581ecffccaa6962b70bceb9af320b1ac9db2a6c3cce787dcd0b1305ed23 not found: ID does not exist" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.077069 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.077394 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.081444 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.175361 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.175485 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn96l\" (UniqueName: \"kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.175533 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.175791 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.175858 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.277495 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.277601 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn96l\" (UniqueName: \"kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l\") pod \"nova-metadata-0\" (UID: 
\"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.277641 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.277693 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.277714 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.278549 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.282252 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.283218 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.287458 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.293204 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn96l\" (UniqueName: \"kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l\") pod \"nova-metadata-0\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.405589 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.490076 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="061e64ec-34bd-4c16-8afd-8b03537455ad" path="/var/lib/kubelet/pods/061e64ec-34bd-4c16-8afd-8b03537455ad/volumes" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.490746 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda9abb5-f11e-49f3-8536-ae098f9f1dfe" path="/var/lib/kubelet/pods/fda9abb5-f11e-49f3-8536-ae098f9f1dfe/volumes" Jan 04 12:11:03 crc kubenswrapper[4797]: I0104 12:11:03.890422 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:03 crc kubenswrapper[4797]: W0104 12:11:03.894754 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2616e008_314a_449d_a324_5de96bcf8379.slice/crio-c54942f78807d773282f94c7fb89ad442019056de8e649e3452d8257a496a514 WatchSource:0}: Error finding container c54942f78807d773282f94c7fb89ad442019056de8e649e3452d8257a496a514: Status 404 returned error can't find the container with id c54942f78807d773282f94c7fb89ad442019056de8e649e3452d8257a496a514 Jan 04 12:11:04 crc kubenswrapper[4797]: I0104 12:11:04.008234 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"970e7570-2ccd-4420-8e1f-70aff6cf2f38","Type":"ContainerStarted","Data":"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2"} Jan 04 12:11:04 crc kubenswrapper[4797]: I0104 12:11:04.008696 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:04 crc kubenswrapper[4797]: I0104 12:11:04.014372 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerStarted","Data":"c54942f78807d773282f94c7fb89ad442019056de8e649e3452d8257a496a514"} Jan 04 12:11:04 crc kubenswrapper[4797]: I0104 12:11:04.030697 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.030674909 podStartE2EDuration="2.030674909s" podCreationTimestamp="2026-01-04 12:11:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:04.024873415 +0000 UTC m=+1362.882060124" watchObservedRunningTime="2026-01-04 12:11:04.030674909 +0000 UTC m=+1362.887861618" Jan 04 12:11:05 crc kubenswrapper[4797]: I0104 12:11:05.026771 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerStarted","Data":"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62"} Jan 04 12:11:05 crc kubenswrapper[4797]: I0104 12:11:05.027187 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerStarted","Data":"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c"} Jan 04 12:11:05 crc kubenswrapper[4797]: I0104 12:11:05.055378 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.055351594 podStartE2EDuration="2.055351594s" podCreationTimestamp="2026-01-04 12:11:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:05.044092555 +0000 UTC m=+1363.901279294" watchObservedRunningTime="2026-01-04 12:11:05.055351594 +0000 UTC m=+1363.912538333" Jan 04 12:11:05 crc kubenswrapper[4797]: E0104 12:11:05.411899 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:05 crc kubenswrapper[4797]: E0104 12:11:05.413770 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:05 crc kubenswrapper[4797]: E0104 12:11:05.415574 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:05 crc kubenswrapper[4797]: E0104 12:11:05.415712 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerName="nova-scheduler-scheduler" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.609060 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.701957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle\") pod \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.702052 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data\") pod \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.702203 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hhmc\" (UniqueName: \"kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc\") pod \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\" (UID: \"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da\") " Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.710275 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc" (OuterVolumeSpecName: "kube-api-access-2hhmc") pod "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" (UID: "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da"). InnerVolumeSpecName "kube-api-access-2hhmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.735850 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data" (OuterVolumeSpecName: "config-data") pod "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" (UID: "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.748101 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" (UID: "dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.804507 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.804555 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.804569 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hhmc\" (UniqueName: \"kubernetes.io/projected/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da-kube-api-access-2hhmc\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:07 crc kubenswrapper[4797]: I0104 12:11:07.989138 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.060927 4797 generic.go:334] "Generic (PLEG): container finished" podID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" exitCode=0 Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.061053 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.060969 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da","Type":"ContainerDied","Data":"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2"} Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.061183 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da","Type":"ContainerDied","Data":"310381ec03d3b8d68b01f9490f641761dd0c870f455d04dfdb33a71282d89a46"} Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.061205 4797 scope.go:117] "RemoveContainer" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.064530 4797 generic.go:334] "Generic (PLEG): container finished" podID="04294055-465d-4e64-b41a-219eee1f50ad" containerID="0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4" exitCode=0 Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.064557 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerDied","Data":"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4"} Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.064596 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"04294055-465d-4e64-b41a-219eee1f50ad","Type":"ContainerDied","Data":"f0839695525e98f6072ecd7a27657ddc665d63c17e9f62008ca8dc02ca59b591"} Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.064610 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.087609 4797 scope.go:117] "RemoveContainer" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.088058 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2\": container with ID starting with 730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2 not found: ID does not exist" containerID="730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.088161 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2"} err="failed to get container status \"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2\": rpc error: code = NotFound desc = could not find container \"730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2\": container with ID starting with 730e663a4a5fe10b98bdb66442703db656fdade3c8bdf1084d70b0f8955b31b2 not found: ID does not exist" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.088247 4797 scope.go:117] "RemoveContainer" containerID="0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.098495 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.110689 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs\") pod \"04294055-465d-4e64-b41a-219eee1f50ad\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.110826 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlxfg\" (UniqueName: \"kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg\") pod \"04294055-465d-4e64-b41a-219eee1f50ad\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.111960 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs" (OuterVolumeSpecName: "logs") pod "04294055-465d-4e64-b41a-219eee1f50ad" (UID: "04294055-465d-4e64-b41a-219eee1f50ad"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.110870 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data\") pod \"04294055-465d-4e64-b41a-219eee1f50ad\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.113345 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle\") pod \"04294055-465d-4e64-b41a-219eee1f50ad\" (UID: \"04294055-465d-4e64-b41a-219eee1f50ad\") " Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.114578 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04294055-465d-4e64-b41a-219eee1f50ad-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.131882 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.139639 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg" (OuterVolumeSpecName: "kube-api-access-mlxfg") pod "04294055-465d-4e64-b41a-219eee1f50ad" (UID: "04294055-465d-4e64-b41a-219eee1f50ad"). InnerVolumeSpecName "kube-api-access-mlxfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.143978 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data" (OuterVolumeSpecName: "config-data") pod "04294055-465d-4e64-b41a-219eee1f50ad" (UID: "04294055-465d-4e64-b41a-219eee1f50ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.144687 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04294055-465d-4e64-b41a-219eee1f50ad" (UID: "04294055-465d-4e64-b41a-219eee1f50ad"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.144860 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.145382 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-log" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145407 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-log" Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.145437 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerName="nova-scheduler-scheduler" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145447 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerName="nova-scheduler-scheduler" Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.145474 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-api" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145482 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-api" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145725 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-log" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145754 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="04294055-465d-4e64-b41a-219eee1f50ad" containerName="nova-api-api" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.145769 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" containerName="nova-scheduler-scheduler" Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.146576 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.149102 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.156232 4797 scope.go:117] "RemoveContainer" containerID="19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.156682 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.215949 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z874n\" (UniqueName: \"kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.216180 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.216369 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.216551 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlxfg\" (UniqueName: \"kubernetes.io/projected/04294055-465d-4e64-b41a-219eee1f50ad-kube-api-access-mlxfg\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.216632 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.216718 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04294055-465d-4e64-b41a-219eee1f50ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.237904 4797 scope.go:117] "RemoveContainer" containerID="0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4"
Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.238341 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4\": container with ID starting with 0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4 not found: ID does not exist" containerID="0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.238470 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4"} err="failed to get container status \"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4\": rpc error: code = NotFound desc = could not find container \"0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4\": container with ID starting with 0c832c957f495de578df888f637db93f8faef80fd59a6c868b90cd8729a725c4 not found: ID does not exist"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.238574 4797 scope.go:117] "RemoveContainer" containerID="19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"
Jan 04 12:11:08 crc kubenswrapper[4797]: E0104 12:11:08.239004 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0\": container with ID starting with 19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0 not found: ID does not exist" containerID="19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.239049 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0"} err="failed to get container status \"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0\": rpc error: code = NotFound desc = could not find container \"19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0\": container with ID starting with 19e96f9decb4d66565b363566641b7c65cd1e83a7eaab9651c4598d1761a25c0 not found: ID does not exist"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.318517 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.318691 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z874n\" (UniqueName: \"kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.318735 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.323145 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.323325 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.343752 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z874n\" (UniqueName: \"kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n\") pod \"nova-scheduler-0\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.406657 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.406809 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.422166 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.431072 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.452735 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.454338 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.466213 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.502291 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.530258 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.530487 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.530564 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.530588 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtvj2\" (UniqueName: \"kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.537649 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.632862 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.633328 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.633388 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.633399 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.633479 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtvj2\" (UniqueName: \"kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.642138 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.643581 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.648758 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtvj2\" (UniqueName: \"kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2\") pod \"nova-api-0\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") " pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.814689 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:08 crc kubenswrapper[4797]: W0104 12:11:08.981574 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b0ee0c0_87fe_415c_acd9_fe2489411ea1.slice/crio-8e33a2271ad8c549b0dc453bb191bc49a32ef82db8d1b0f8629381d395ab7864 WatchSource:0}: Error finding container 8e33a2271ad8c549b0dc453bb191bc49a32ef82db8d1b0f8629381d395ab7864: Status 404 returned error can't find the container with id 8e33a2271ad8c549b0dc453bb191bc49a32ef82db8d1b0f8629381d395ab7864
Jan 04 12:11:08 crc kubenswrapper[4797]: I0104 12:11:08.985844 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:11:09 crc kubenswrapper[4797]: I0104 12:11:09.087371 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6b0ee0c0-87fe-415c-acd9-fe2489411ea1","Type":"ContainerStarted","Data":"8e33a2271ad8c549b0dc453bb191bc49a32ef82db8d1b0f8629381d395ab7864"}
Jan 04 12:11:09 crc kubenswrapper[4797]: I0104 12:11:09.286615 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:09 crc kubenswrapper[4797]: I0104 12:11:09.487742 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04294055-465d-4e64-b41a-219eee1f50ad" path="/var/lib/kubelet/pods/04294055-465d-4e64-b41a-219eee1f50ad/volumes"
Jan 04 12:11:09 crc kubenswrapper[4797]: I0104 12:11:09.488876 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da" path="/var/lib/kubelet/pods/dcf028cf-1e5a-4c31-85eb-1ad9e2d6b7da/volumes"
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.098485 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6b0ee0c0-87fe-415c-acd9-fe2489411ea1","Type":"ContainerStarted","Data":"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70"}
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.101580 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerStarted","Data":"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"}
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.101619 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerStarted","Data":"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"}
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.101634 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerStarted","Data":"518c1e3481b096e996020ef10312ba8344946285f821f84d4fa4795cf9fd4f50"}
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.133292 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.133267312 podStartE2EDuration="2.133267312s" podCreationTimestamp="2026-01-04 12:11:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:10.118459105 +0000 UTC m=+1368.975645854" watchObservedRunningTime="2026-01-04 12:11:10.133267312 +0000 UTC m=+1368.990454031"
Jan 04 12:11:10 crc kubenswrapper[4797]: I0104 12:11:10.157932 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.157903272 podStartE2EDuration="2.157903272s" podCreationTimestamp="2026-01-04 12:11:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:10.140083351 +0000 UTC m=+1368.997270070" watchObservedRunningTime="2026-01-04 12:11:10.157903272 +0000 UTC m=+1369.015090001"
Jan 04 12:11:12 crc kubenswrapper[4797]: I0104 12:11:12.481679 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 04 12:11:13 crc kubenswrapper[4797]: I0104 12:11:13.406683 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 04 12:11:13 crc kubenswrapper[4797]: I0104 12:11:13.406754 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 04 12:11:13 crc kubenswrapper[4797]: I0104 12:11:13.537906 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 04 12:11:14 crc kubenswrapper[4797]: I0104 12:11:14.427304 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:14 crc kubenswrapper[4797]: I0104 12:11:14.427312 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:18 crc kubenswrapper[4797]: I0104 12:11:18.538389 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Jan 04 12:11:18 crc kubenswrapper[4797]: I0104 12:11:18.579382 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Jan 04 12:11:18 crc kubenswrapper[4797]: I0104 12:11:18.815512 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 04 12:11:18 crc kubenswrapper[4797]: I0104 12:11:18.815624 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.246185 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.492769 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.492844 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.496512 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.497665 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.497931 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666" gracePeriod=600
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.898138 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:19 crc kubenswrapper[4797]: I0104 12:11:19.898183 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:20 crc kubenswrapper[4797]: I0104 12:11:20.230537 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666" exitCode=0
Jan 04 12:11:20 crc kubenswrapper[4797]: I0104 12:11:20.230640 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666"}
Jan 04 12:11:20 crc kubenswrapper[4797]: I0104 12:11:20.230886 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2"}
Jan 04 12:11:20 crc kubenswrapper[4797]: I0104 12:11:20.230922 4797 scope.go:117] "RemoveContainer" containerID="1e9fed21743d4b8d9bccd1ce55ca1c3e4c9f1539e2993951b78752693cf46766"
Jan 04 12:11:23 crc kubenswrapper[4797]: I0104 12:11:23.418926 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 04 12:11:23 crc kubenswrapper[4797]: I0104 12:11:23.419870 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 04 12:11:23 crc kubenswrapper[4797]: I0104 12:11:23.430210 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 04 12:11:23 crc kubenswrapper[4797]: I0104 12:11:23.430712 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.321129 4797 generic.go:334] "Generic (PLEG): container finished" podID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" containerID="5eb75630dfbca09b68903f956d04849bc104e20fc6589774c3712547886605a9" exitCode=137
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.321618 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5d4aa7d-4d04-4811-9220-0100fb7cf98e","Type":"ContainerDied","Data":"5eb75630dfbca09b68903f956d04849bc104e20fc6589774c3712547886605a9"}
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.632701 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.713547 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbzh5\" (UniqueName: \"kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5\") pod \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") "
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.713736 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle\") pod \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") "
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.713973 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data\") pod \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\" (UID: \"e5d4aa7d-4d04-4811-9220-0100fb7cf98e\") "
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.720942 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5" (OuterVolumeSpecName: "kube-api-access-nbzh5") pod "e5d4aa7d-4d04-4811-9220-0100fb7cf98e" (UID: "e5d4aa7d-4d04-4811-9220-0100fb7cf98e"). InnerVolumeSpecName "kube-api-access-nbzh5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.754662 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data" (OuterVolumeSpecName: "config-data") pod "e5d4aa7d-4d04-4811-9220-0100fb7cf98e" (UID: "e5d4aa7d-4d04-4811-9220-0100fb7cf98e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.761572 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5d4aa7d-4d04-4811-9220-0100fb7cf98e" (UID: "e5d4aa7d-4d04-4811-9220-0100fb7cf98e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.816166 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.816199 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbzh5\" (UniqueName: \"kubernetes.io/projected/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-kube-api-access-nbzh5\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:26 crc kubenswrapper[4797]: I0104 12:11:26.816210 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d4aa7d-4d04-4811-9220-0100fb7cf98e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.333079 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5d4aa7d-4d04-4811-9220-0100fb7cf98e","Type":"ContainerDied","Data":"6944ad56e11adadc405242b913cb6ee0a1314799741bff2528d907355b840037"}
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.333147 4797 scope.go:117] "RemoveContainer" containerID="5eb75630dfbca09b68903f956d04849bc104e20fc6589774c3712547886605a9"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.333199 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.386027 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.413524 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.431913 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:27 crc kubenswrapper[4797]: E0104 12:11:27.432335 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" containerName="nova-cell1-novncproxy-novncproxy"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.432350 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" containerName="nova-cell1-novncproxy-novncproxy"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.432527 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" containerName="nova-cell1-novncproxy-novncproxy"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.433097 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.433166 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.462427 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.462850 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.462857 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.493363 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d4aa7d-4d04-4811-9220-0100fb7cf98e" path="/var/lib/kubelet/pods/e5d4aa7d-4d04-4811-9220-0100fb7cf98e/volumes"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.562594 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.562760 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzcn2\" (UniqueName: \"kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.562832 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.562899 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.563065 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.665002 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.665338 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzcn2\" (UniqueName: \"kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.665453 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.665575 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.665676 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.673787 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.677579 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.680864 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.681478 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.682107 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzcn2\" (UniqueName: \"kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:27 crc kubenswrapper[4797]: I0104 12:11:27.777882 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.284906 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.345541 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6ef2789-7ab1-46b1-852e-5bbb106b4044","Type":"ContainerStarted","Data":"2127fbb355294cb08d15d7f40fa077e0ede282b05a12d346e356e92238f327fb"}
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.819653 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.820379 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.822695 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 04 12:11:28 crc kubenswrapper[4797]: I0104 12:11:28.825190 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.359882 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6ef2789-7ab1-46b1-852e-5bbb106b4044","Type":"ContainerStarted","Data":"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f"}
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.360291 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.373741 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.397415 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.397388299 podStartE2EDuration="2.397388299s" podCreationTimestamp="2026-01-04 12:11:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:29.385731401 +0000 UTC m=+1388.242918150" watchObservedRunningTime="2026-01-04 12:11:29.397388299 +0000 UTC m=+1388.254575048"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.626191 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"]
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.635693 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.694181 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"]
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721080 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721454 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721596 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28x9z\" (UniqueName: \"kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721668 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721764 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.721872 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823175 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823262 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28x9z\" (UniqueName: \"kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823296 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823362 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823426 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.823448 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.824496 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.824588 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.824940 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.824960 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.825190 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.842434 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28x9z\" (UniqueName: \"kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z\") pod \"dnsmasq-dns-867cd545c7-6sldr\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:29 crc kubenswrapper[4797]: I0104 12:11:29.973830 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:30 crc kubenswrapper[4797]: I0104 12:11:30.428475 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"]
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.392734 4797 generic.go:334] "Generic (PLEG): container finished" podID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerID="942a2a207d33a194350ddb61a268b463381597f7aa1bb2b9c422d7f35718c86b" exitCode=0
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.392913 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" event={"ID":"8d097117-3af5-47a7-bcbd-ad887d0972df","Type":"ContainerDied","Data":"942a2a207d33a194350ddb61a268b463381597f7aa1bb2b9c422d7f35718c86b"}
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.394057 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" event={"ID":"8d097117-3af5-47a7-bcbd-ad887d0972df","Type":"ContainerStarted","Data":"459dc42a5bf1f9a83e4b075b0c3e22e1d7173609fd4a2b9868696a3d21529525"}
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.650152 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.650593 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-central-agent" containerID="cri-o://15bea94fea63ace192dfc26d8e02b1edc4fe12a5c7f970d0889db6fa26d86edf" gracePeriod=30
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.651095 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="proxy-httpd" containerID="cri-o://d5bfda83e1144c987cb1ddfd72ecdfdce7d03221023306091c8029dc7bea793f" gracePeriod=30
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.651183 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="sg-core" containerID="cri-o://22752eb1ddb051967b6c0036a2cf45902cbf1100e84fcd74f027afd664671caf" gracePeriod=30
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.651314 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-notification-agent" containerID="cri-o://b938370d51f4a430f6412957c6f857f3fdff9becd007e40941b645df2780bb31" gracePeriod=30
Jan 04 12:11:31 crc kubenswrapper[4797]: I0104 12:11:31.915651 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.268402 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.177:3000/\": dial tcp 10.217.0.177:3000: connect: connection refused"
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402236 4797 generic.go:334] "Generic (PLEG): container finished" podID="b258d8da-50e6-42ec-84c6-0d324f227545" containerID="d5bfda83e1144c987cb1ddfd72ecdfdce7d03221023306091c8029dc7bea793f" exitCode=0
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402265 4797 generic.go:334] "Generic (PLEG): container finished" podID="b258d8da-50e6-42ec-84c6-0d324f227545" containerID="22752eb1ddb051967b6c0036a2cf45902cbf1100e84fcd74f027afd664671caf" exitCode=2
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402274 4797 generic.go:334] "Generic (PLEG): container finished" podID="b258d8da-50e6-42ec-84c6-0d324f227545" containerID="15bea94fea63ace192dfc26d8e02b1edc4fe12a5c7f970d0889db6fa26d86edf" exitCode=0
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402304 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerDied","Data":"d5bfda83e1144c987cb1ddfd72ecdfdce7d03221023306091c8029dc7bea793f"}
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402327 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerDied","Data":"22752eb1ddb051967b6c0036a2cf45902cbf1100e84fcd74f027afd664671caf"}
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.402340 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerDied","Data":"15bea94fea63ace192dfc26d8e02b1edc4fe12a5c7f970d0889db6fa26d86edf"}
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.403711 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" event={"ID":"8d097117-3af5-47a7-bcbd-ad887d0972df","Type":"ContainerStarted","Data":"1e8c12d06a30d46110e5c3864a386ee61f29f7a73cce977df15092fc9de6dac5"}
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.403807 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-log" containerID="cri-o://c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5" gracePeriod=30
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.404012 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-api" containerID="cri-o://26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea" gracePeriod=30
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.427623 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" podStartSLOduration=3.427604529 podStartE2EDuration="3.427604529s" podCreationTimestamp="2026-01-04 12:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:32.423804045 +0000 UTC m=+1391.280990764" watchObservedRunningTime="2026-01-04 12:11:32.427604529 +0000 UTC m=+1391.284791248"
Jan 04 12:11:32 crc kubenswrapper[4797]: I0104 12:11:32.778238 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:33 crc kubenswrapper[4797]: I0104 12:11:33.415049 4797 generic.go:334] "Generic (PLEG): container finished" podID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerID="c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5" exitCode=143
Jan 04 12:11:33 crc kubenswrapper[4797]: I0104 12:11:33.415149 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerDied","Data":"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"}
Jan 04 12:11:33 crc kubenswrapper[4797]: I0104 12:11:33.415755 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-867cd545c7-6sldr"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.018018 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.133769 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data\") pod \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") "
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.133879 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs\") pod \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") "
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.133999 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle\") pod \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") "
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.134037 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtvj2\" (UniqueName: \"kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2\") pod \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\" (UID: \"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d\") "
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.134932 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs" (OuterVolumeSpecName: "logs") pod "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" (UID: "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.139846 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2" (OuterVolumeSpecName: "kube-api-access-rtvj2") pod "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" (UID: "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d"). InnerVolumeSpecName "kube-api-access-rtvj2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.166864 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data" (OuterVolumeSpecName: "config-data") pod "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" (UID: "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.184137 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" (UID: "a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.235875 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.235927 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.235937 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.235945 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtvj2\" (UniqueName: \"kubernetes.io/projected/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d-kube-api-access-rtvj2\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.441722 4797 generic.go:334] "Generic (PLEG): container finished" podID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerID="26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea" exitCode=0
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.441774 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerDied","Data":"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"}
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.441842 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d","Type":"ContainerDied","Data":"518c1e3481b096e996020ef10312ba8344946285f821f84d4fa4795cf9fd4f50"}
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.441864 4797 scope.go:117] "RemoveContainer" containerID="26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.441789 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.466908 4797 scope.go:117] "RemoveContainer" containerID="c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.468439 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.478207 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.490559 4797 scope.go:117] "RemoveContainer" containerID="26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"
Jan 04 12:11:36 crc kubenswrapper[4797]: E0104 12:11:36.491076 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea\": container with ID starting with 26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea not found: ID does not exist" containerID="26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.491109 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea"} err="failed to get container status \"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea\": rpc error: code = NotFound desc = could not find container \"26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea\": container with ID starting with 26d3cbdc8e2768628acddb86e33f8df369d034e73ac5e5fa2108bd3bc16e4bea not found: ID does not exist"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.491130 4797 scope.go:117] "RemoveContainer" containerID="c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"
Jan 04 12:11:36 crc kubenswrapper[4797]: E0104 12:11:36.491467 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5\": container with ID starting with c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5 not found: ID does not exist" containerID="c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.491505 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5"} err="failed to get container status \"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5\": rpc error: code = NotFound desc = could not find container \"c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5\": container with ID starting with c27339e34738e4f38e266370f7d8f63c0a83d7627c5b0fd30f03819a2c00aad5 not found: ID does not exist"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.496361 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:36 crc kubenswrapper[4797]: E0104 12:11:36.498687 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-api"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.498709 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-api"
Jan 04 12:11:36 crc kubenswrapper[4797]: E0104 12:11:36.498750 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-log"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.498757 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-log"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.498937 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-log"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.498963 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" containerName="nova-api-api"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.510058 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.513091 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.513412 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.522932 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.532218 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.646875 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.646944 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.646971 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.647061 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.647461 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.647525 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbkcz\" (UniqueName: \"kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.749841 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.749887 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.749941 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.750023 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.750046 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbkcz\" (UniqueName: \"kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.750093 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.750502 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.755452 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.755483 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.755541 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.758488 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.784705 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbkcz\" (UniqueName: \"kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz\") pod \"nova-api-0\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " pod="openstack/nova-api-0"
Jan 04 12:11:36 crc kubenswrapper[4797]: I0104 12:11:36.846974 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.276783 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.465039 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerStarted","Data":"cc12c73d09bcdc9aaa69aea1339ae19dbee60a7952827bb70d18ca765c5f1303"}
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.473922 4797 generic.go:334] "Generic (PLEG): container finished" podID="b258d8da-50e6-42ec-84c6-0d324f227545" containerID="b938370d51f4a430f6412957c6f857f3fdff9becd007e40941b645df2780bb31" exitCode=0
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.483485 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d" path="/var/lib/kubelet/pods/a11d8523-ba16-4a1e-a1dc-1dbdcb2ef51d/volumes"
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.484319 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerDied","Data":"b938370d51f4a430f6412957c6f857f3fdff9becd007e40941b645df2780bb31"}
Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.576904 4797 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673499 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673577 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673680 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673740 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673779 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673864 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkxv6\" (UniqueName: \"kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673885 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.673925 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml\") pod \"b258d8da-50e6-42ec-84c6-0d324f227545\" (UID: \"b258d8da-50e6-42ec-84c6-0d324f227545\") " Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.677014 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.677253 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.680267 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6" (OuterVolumeSpecName: "kube-api-access-fkxv6") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "kube-api-access-fkxv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.681225 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts" (OuterVolumeSpecName: "scripts") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.717205 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.746402 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.759349 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777129 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777161 4797 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777171 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777182 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkxv6\" (UniqueName: \"kubernetes.io/projected/b258d8da-50e6-42ec-84c6-0d324f227545-kube-api-access-fkxv6\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777190 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777199 4797 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.777206 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b258d8da-50e6-42ec-84c6-0d324f227545-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.778386 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.800273 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data" (OuterVolumeSpecName: "config-data") pod "b258d8da-50e6-42ec-84c6-0d324f227545" (UID: "b258d8da-50e6-42ec-84c6-0d324f227545"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.800336 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:37 crc kubenswrapper[4797]: I0104 12:11:37.878518 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b258d8da-50e6-42ec-84c6-0d324f227545-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.484638 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerStarted","Data":"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5"} Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.484677 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerStarted","Data":"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691"} Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.489977 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b258d8da-50e6-42ec-84c6-0d324f227545","Type":"ContainerDied","Data":"da218aa1ec4703d3a0dcbfaf72c093b6c819dddbd7fd4bce6497b17b42f0d5b3"} Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.490077 4797 scope.go:117] "RemoveContainer" containerID="d5bfda83e1144c987cb1ddfd72ecdfdce7d03221023306091c8029dc7bea793f" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.490075 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.518536 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.529479 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.5294495550000002 podStartE2EDuration="2.529449555s" podCreationTimestamp="2026-01-04 12:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:38.509132292 +0000 UTC m=+1397.366319021" watchObservedRunningTime="2026-01-04 12:11:38.529449555 +0000 UTC m=+1397.386636294" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.531600 4797 scope.go:117] "RemoveContainer" containerID="22752eb1ddb051967b6c0036a2cf45902cbf1100e84fcd74f027afd664671caf" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.575008 4797 scope.go:117] "RemoveContainer" containerID="b938370d51f4a430f6412957c6f857f3fdff9becd007e40941b645df2780bb31" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.604792 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.625794 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.638479 4797 scope.go:117] "RemoveContainer" containerID="15bea94fea63ace192dfc26d8e02b1edc4fe12a5c7f970d0889db6fa26d86edf" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640103 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:38 crc kubenswrapper[4797]: E0104 12:11:38.640519 4797 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="sg-core" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640533 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="sg-core" Jan 04 12:11:38 crc kubenswrapper[4797]: E0104 12:11:38.640549 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-notification-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640556 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-notification-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: E0104 12:11:38.640564 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-central-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640570 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-central-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: E0104 12:11:38.640587 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="proxy-httpd" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640593 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="proxy-httpd" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640760 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="proxy-httpd" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640776 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="sg-core" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640787 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-notification-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.640806 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" containerName="ceilometer-central-agent" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.642402 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.644862 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.645281 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.645454 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.670392 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.707727 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-d29k2"] Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.708949 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.719409 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.720233 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.735131 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-d29k2"] Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792403 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvflq\" (UniqueName: \"kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792509 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792580 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792670 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792727 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792746 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpwvw\" (UniqueName: \"kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792766 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792839 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs\") 
pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792879 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.792947 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.793034 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.793081 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894266 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894337 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894361 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894383 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvflq\" (UniqueName: \"kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894418 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894452 
4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894503 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894525 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894541 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpwvw\" (UniqueName: \"kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894558 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894590 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.894607 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.896290 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.896807 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.900760 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc 
kubenswrapper[4797]: I0104 12:11:38.906633 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.914121 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.915346 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.915602 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.916882 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.920251 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpwvw\" (UniqueName: \"kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.920652 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.922467 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvflq\" (UniqueName: \"kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq\") pod \"ceilometer-0\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") " pod="openstack/ceilometer-0" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.938715 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-d29k2\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:38 crc kubenswrapper[4797]: I0104 12:11:38.980164 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.030305 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.425881 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.429641 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.489483 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b258d8da-50e6-42ec-84c6-0d324f227545" path="/var/lib/kubelet/pods/b258d8da-50e6-42ec-84c6-0d324f227545/volumes" Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.501963 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerStarted","Data":"5e342d3f9fb9be28dbf8d5bf6129f4af45768faa49c3b73cf8842b9fabc0e09a"} Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.533445 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-d29k2"] Jan 04 12:11:39 crc kubenswrapper[4797]: W0104 12:11:39.538826 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1776cf53_feb7_4695_a910_c74a47e30dec.slice/crio-eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8 WatchSource:0}: Error finding container eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8: Status 404 returned error can't find the container with id eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8 Jan 04 12:11:39 crc kubenswrapper[4797]: I0104 12:11:39.977152 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.059394 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.060003 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="dnsmasq-dns" containerID="cri-o://5a8f25d330d8d93b94b4f89f5ab02f3d9ff1f469e845e6f615fa4dfc0aa9ad40" gracePeriod=10 Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.508925 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerStarted","Data":"5e95861f05a86916d5bbae50b4b2c7dbc814b737dab7253f6895bdb7c85967b0"} Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.511536 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-d29k2" event={"ID":"1776cf53-feb7-4695-a910-c74a47e30dec","Type":"ContainerStarted","Data":"d42b3b430ecab50d28d72871ff41ee2a8381b2e11c2e8f2a5b95be60179f555e"} Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.511583 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-d29k2" event={"ID":"1776cf53-feb7-4695-a910-c74a47e30dec","Type":"ContainerStarted","Data":"eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8"} Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.515662 4797 generic.go:334] "Generic (PLEG): container finished" podID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerID="5a8f25d330d8d93b94b4f89f5ab02f3d9ff1f469e845e6f615fa4dfc0aa9ad40" exitCode=0 Jan 04 12:11:40 crc 
kubenswrapper[4797]: I0104 12:11:40.515761 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" event={"ID":"977fcbf9-a952-4536-92ed-c4bc6dd86887","Type":"ContainerDied","Data":"5a8f25d330d8d93b94b4f89f5ab02f3d9ff1f469e845e6f615fa4dfc0aa9ad40"} Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.515814 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" event={"ID":"977fcbf9-a952-4536-92ed-c4bc6dd86887","Type":"ContainerDied","Data":"9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743"} Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.515833 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9255b99bfaa69e06f4bcfa270d0127a19a8d8a394867072f50e1f093ea84f743" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.528116 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-d29k2" podStartSLOduration=2.52807784 podStartE2EDuration="2.52807784s" podCreationTimestamp="2026-01-04 12:11:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:40.524523772 +0000 UTC m=+1399.381710481" watchObservedRunningTime="2026-01-04 12:11:40.52807784 +0000 UTC m=+1399.385264549" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.561600 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637403 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637522 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637666 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637773 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrr5c\" (UniqueName: \"kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637811 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.637863 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb\") pod \"977fcbf9-a952-4536-92ed-c4bc6dd86887\" (UID: \"977fcbf9-a952-4536-92ed-c4bc6dd86887\") " Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.643172 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c" (OuterVolumeSpecName: "kube-api-access-xrr5c") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "kube-api-access-xrr5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.696266 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config" (OuterVolumeSpecName: "config") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.726556 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.734358 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.741222 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrr5c\" (UniqueName: \"kubernetes.io/projected/977fcbf9-a952-4536-92ed-c4bc6dd86887-kube-api-access-xrr5c\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.741251 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.741260 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.741269 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.743434 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.755759 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "977fcbf9-a952-4536-92ed-c4bc6dd86887" (UID: "977fcbf9-a952-4536-92ed-c4bc6dd86887"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.872857 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:40 crc kubenswrapper[4797]: I0104 12:11:40.873158 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/977fcbf9-a952-4536-92ed-c4bc6dd86887-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:41 crc kubenswrapper[4797]: I0104 12:11:41.533149 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfb54f9b5-dd9vt" Jan 04 12:11:41 crc kubenswrapper[4797]: I0104 12:11:41.534540 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerStarted","Data":"c8a43ccc510e54f1604c2a6c7afb923307b8eda1f1c77a645703c76df3b2be10"} Jan 04 12:11:41 crc kubenswrapper[4797]: I0104 12:11:41.534597 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerStarted","Data":"4516e73ced3267162e550e03f293cb70e85913d200f7bae5f299b8b55ebea07c"} Jan 04 12:11:41 crc kubenswrapper[4797]: I0104 12:11:41.562316 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:11:41 crc kubenswrapper[4797]: I0104 12:11:41.574477 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bfb54f9b5-dd9vt"] Jan 04 12:11:43 crc kubenswrapper[4797]: I0104 12:11:43.482932 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" path="/var/lib/kubelet/pods/977fcbf9-a952-4536-92ed-c4bc6dd86887/volumes" Jan 04 12:11:43 crc kubenswrapper[4797]: I0104 12:11:43.556247 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerStarted","Data":"1c0fd3972d9964ce21b2d52308680c08c54401c7fccda3b5fdc0b6f2f4fc3b91"} Jan 04 12:11:43 crc kubenswrapper[4797]: I0104 12:11:43.556629 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:11:43 crc kubenswrapper[4797]: I0104 12:11:43.577705 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.225800267 podStartE2EDuration="5.57768766s" podCreationTimestamp="2026-01-04 12:11:38 +0000 UTC" firstStartedPulling="2026-01-04 12:11:39.425686041 +0000 UTC m=+1398.282872740" lastFinishedPulling="2026-01-04 12:11:42.777573424 +0000 UTC m=+1401.634760133" observedRunningTime="2026-01-04 12:11:43.577167357 +0000 UTC m=+1402.434354106" watchObservedRunningTime="2026-01-04 12:11:43.57768766 +0000 UTC m=+1402.434874369" Jan 04 12:11:44 crc kubenswrapper[4797]: I0104 12:11:44.568077 4797 generic.go:334] "Generic (PLEG): container finished" 
podID="1776cf53-feb7-4695-a910-c74a47e30dec" containerID="d42b3b430ecab50d28d72871ff41ee2a8381b2e11c2e8f2a5b95be60179f555e" exitCode=0 Jan 04 12:11:44 crc kubenswrapper[4797]: I0104 12:11:44.568231 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-d29k2" event={"ID":"1776cf53-feb7-4695-a910-c74a47e30dec","Type":"ContainerDied","Data":"d42b3b430ecab50d28d72871ff41ee2a8381b2e11c2e8f2a5b95be60179f555e"} Jan 04 12:11:45 crc kubenswrapper[4797]: I0104 12:11:45.994437 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.090492 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle\") pod \"1776cf53-feb7-4695-a910-c74a47e30dec\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.090784 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts\") pod \"1776cf53-feb7-4695-a910-c74a47e30dec\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.090832 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data\") pod \"1776cf53-feb7-4695-a910-c74a47e30dec\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.090864 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpwvw\" (UniqueName: \"kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw\") pod \"1776cf53-feb7-4695-a910-c74a47e30dec\" (UID: \"1776cf53-feb7-4695-a910-c74a47e30dec\") " Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.095581 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw" (OuterVolumeSpecName: "kube-api-access-rpwvw") pod "1776cf53-feb7-4695-a910-c74a47e30dec" (UID: "1776cf53-feb7-4695-a910-c74a47e30dec"). InnerVolumeSpecName "kube-api-access-rpwvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.096996 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts" (OuterVolumeSpecName: "scripts") pod "1776cf53-feb7-4695-a910-c74a47e30dec" (UID: "1776cf53-feb7-4695-a910-c74a47e30dec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.116474 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1776cf53-feb7-4695-a910-c74a47e30dec" (UID: "1776cf53-feb7-4695-a910-c74a47e30dec"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.129826 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data" (OuterVolumeSpecName: "config-data") pod "1776cf53-feb7-4695-a910-c74a47e30dec" (UID: "1776cf53-feb7-4695-a910-c74a47e30dec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.192926 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.192961 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.192971 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1776cf53-feb7-4695-a910-c74a47e30dec-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.192998 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpwvw\" (UniqueName: \"kubernetes.io/projected/1776cf53-feb7-4695-a910-c74a47e30dec-kube-api-access-rpwvw\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.592269 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-d29k2" event={"ID":"1776cf53-feb7-4695-a910-c74a47e30dec","Type":"ContainerDied","Data":"eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8"} Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.592348 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb47995a3592a65534fc3cec29dcafcdc8e7dfd5d29bc84aa2318402f18442f8" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.592394 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-d29k2" Jan 04 12:11:46 crc kubenswrapper[4797]: E0104 12:11:46.685550 4797 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1776cf53_feb7_4695_a910_c74a47e30dec.slice\": RecentStats: unable to find data in memory cache]" Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.773402 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.774457 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-log" containerID="cri-o://72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" gracePeriod=30 Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.774513 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-api" containerID="cri-o://57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" gracePeriod=30 Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.791167 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.791381 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerName="nova-scheduler-scheduler" containerID="cri-o://4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" gracePeriod=30 Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.833404 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.833659 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" containerID="cri-o://76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c" gracePeriod=30 Jan 04 12:11:46 crc kubenswrapper[4797]: I0104 12:11:46.833763 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" containerID="cri-o://5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62" gracePeriod=30 Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.406275 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.525514 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.525602 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.525627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbkcz\" (UniqueName: \"kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.525672 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.525736 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.526270 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs" (OuterVolumeSpecName: "logs") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.526373 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data\") pod \"1b45a044-cbf3-4b4c-887d-b76768c3670e\" (UID: \"1b45a044-cbf3-4b4c-887d-b76768c3670e\") " Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.528163 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b45a044-cbf3-4b4c-887d-b76768c3670e-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.531214 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz" (OuterVolumeSpecName: "kube-api-access-tbkcz") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "kube-api-access-tbkcz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.567604 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data" (OuterVolumeSpecName: "config-data") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.572049 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.601496 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605247 4797 generic.go:334] "Generic (PLEG): container finished" podID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerID="57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" exitCode=0 Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605279 4797 generic.go:334] "Generic (PLEG): container finished" podID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerID="72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" exitCode=143 Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605317 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerDied","Data":"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691"} Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605342 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerDied","Data":"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5"} Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605350 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1b45a044-cbf3-4b4c-887d-b76768c3670e","Type":"ContainerDied","Data":"cc12c73d09bcdc9aaa69aea1339ae19dbee60a7952827bb70d18ca765c5f1303"} Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605380 4797 scope.go:117] "RemoveContainer" containerID="57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.605489 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.609308 4797 generic.go:334] "Generic (PLEG): container finished" podID="2616e008-314a-449d-a324-5de96bcf8379" containerID="76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c" exitCode=143 Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.609418 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerDied","Data":"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c"} Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.617215 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1b45a044-cbf3-4b4c-887d-b76768c3670e" (UID: "1b45a044-cbf3-4b4c-887d-b76768c3670e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.629937 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.629977 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.630004 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbkcz\" (UniqueName: \"kubernetes.io/projected/1b45a044-cbf3-4b4c-887d-b76768c3670e-kube-api-access-tbkcz\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.630018 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.630092 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b45a044-cbf3-4b4c-887d-b76768c3670e-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.648953 4797 scope.go:117] "RemoveContainer" containerID="72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.666850 4797 scope.go:117] "RemoveContainer" containerID="57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.667317 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691\": container with ID starting with 57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691 not found: ID does not exist" containerID="57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.667346 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691"} err="failed to get container status 
\"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691\": rpc error: code = NotFound desc = could not find container \"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691\": container with ID starting with 57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.667366 4797 scope.go:117] "RemoveContainer" containerID="72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.668190 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5\": container with ID starting with 72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5 not found: ID does not exist" containerID="72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.668245 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5"} err="failed to get container status \"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5\": rpc error: code = NotFound desc = could not find container \"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5\": container with ID starting with 72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.668280 4797 scope.go:117] "RemoveContainer" containerID="57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.668605 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691"} err="failed to get container status \"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691\": rpc error: code = NotFound desc = could not find container \"57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691\": container with ID starting with 57fb1ba91575962a442e148cd0e53d23e6235211f8e3991e21d167b9dd732691 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.668628 4797 scope.go:117] "RemoveContainer" containerID="72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.668905 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5"} err="failed to get container status \"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5\": rpc error: code = NotFound desc = could not find container \"72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5\": container with ID starting with 72b9f06ca135b6863b085bf81980ecf96251d50779ed0b685d0329b9c87ae7f5 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.940735 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.954830 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970096 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 04 
12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.970484 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1776cf53-feb7-4695-a910-c74a47e30dec" containerName="nova-manage" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970502 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1776cf53-feb7-4695-a910-c74a47e30dec" containerName="nova-manage" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.970528 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-log" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970534 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-log" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.970563 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="dnsmasq-dns" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970572 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="dnsmasq-dns" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.970658 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-api" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970668 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-api" Jan 04 12:11:47 crc kubenswrapper[4797]: E0104 12:11:47.970687 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="init" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970692 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="init" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970849 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-api" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970869 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="977fcbf9-a952-4536-92ed-c4bc6dd86887" containerName="dnsmasq-dns" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970886 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1776cf53-feb7-4695-a910-c74a47e30dec" containerName="nova-manage" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.970895 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" containerName="nova-api-log" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.971818 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.975268 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.975559 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.978504 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 04 12:11:47 crc kubenswrapper[4797]: I0104 12:11:47.991659 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.036937 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.037155 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xdd\" (UniqueName: \"kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.037252 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.037374 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.037425 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.037458 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.138720 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.138769 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xdd\" (UniqueName: \"kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd\") pod 
\"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.138813 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.138875 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.139490 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.139504 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.139545 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.142883 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.143136 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.144556 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.144763 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.159520 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xdd\" (UniqueName: \"kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd\") pod \"nova-api-0\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " pod="openstack/nova-api-0" Jan 
04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.294469 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[4797]: E0104 12:11:48.539844 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:48 crc kubenswrapper[4797]: E0104 12:11:48.541817 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:48 crc kubenswrapper[4797]: E0104 12:11:48.543162 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:48 crc kubenswrapper[4797]: E0104 12:11:48.543232 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerName="nova-scheduler-scheduler" Jan 04 12:11:48 crc kubenswrapper[4797]: W0104 12:11:48.736335 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7bef264_130e_4b89_ae25_bff622d12a16.slice/crio-b184fed43afcf9d8ca7dc2f24b4ae0105023b2b4e798de2ff00864a697986fab WatchSource:0}: Error finding container b184fed43afcf9d8ca7dc2f24b4ae0105023b2b4e798de2ff00864a697986fab: Status 404 returned error can't find the container with id b184fed43afcf9d8ca7dc2f24b4ae0105023b2b4e798de2ff00864a697986fab Jan 04 12:11:48 crc kubenswrapper[4797]: I0104 12:11:48.743279 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.485347 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b45a044-cbf3-4b4c-887d-b76768c3670e" path="/var/lib/kubelet/pods/1b45a044-cbf3-4b4c-887d-b76768c3670e/volumes" Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.633706 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerStarted","Data":"b3e1a2411d4524d7382c2ff29d5707c5d826d0551904e8a9f2a1500c0fabd198"} Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.633763 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerStarted","Data":"4ffd1060deed2e4837e76d50f06b6b55d7a3082c39494946544e96c70503bbad"} Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.633784 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerStarted","Data":"b184fed43afcf9d8ca7dc2f24b4ae0105023b2b4e798de2ff00864a697986fab"} Jan 04 
12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.672031 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.67201421 podStartE2EDuration="2.67201421s" podCreationTimestamp="2026-01-04 12:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:49.656320901 +0000 UTC m=+1408.513507630" watchObservedRunningTime="2026-01-04 12:11:49.67201421 +0000 UTC m=+1408.529200909" Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.981121 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:54656->10.217.0.190:8775: read: connection reset by peer" Jan 04 12:11:49 crc kubenswrapper[4797]: I0104 12:11:49.981183 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:54648->10.217.0.190:8775: read: connection reset by peer" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.056417 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.059245 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.087667 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.176981 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.177051 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.177175 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgh6j\" (UniqueName: \"kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.278725 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgh6j\" (UniqueName: \"kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.278925 4797 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.278961 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.279533 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.280144 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.302334 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgh6j\" (UniqueName: \"kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j\") pod \"redhat-operators-xb9sl\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.434816 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.466805 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.583434 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn96l\" (UniqueName: \"kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l\") pod \"2616e008-314a-449d-a324-5de96bcf8379\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.583542 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle\") pod \"2616e008-314a-449d-a324-5de96bcf8379\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.583600 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data\") pod \"2616e008-314a-449d-a324-5de96bcf8379\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.583627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs\") pod \"2616e008-314a-449d-a324-5de96bcf8379\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.583701 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs\") pod \"2616e008-314a-449d-a324-5de96bcf8379\" (UID: \"2616e008-314a-449d-a324-5de96bcf8379\") " Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.585576 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs" (OuterVolumeSpecName: "logs") pod "2616e008-314a-449d-a324-5de96bcf8379" (UID: "2616e008-314a-449d-a324-5de96bcf8379"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.594706 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l" (OuterVolumeSpecName: "kube-api-access-rn96l") pod "2616e008-314a-449d-a324-5de96bcf8379" (UID: "2616e008-314a-449d-a324-5de96bcf8379"). InnerVolumeSpecName "kube-api-access-rn96l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.615269 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2616e008-314a-449d-a324-5de96bcf8379" (UID: "2616e008-314a-449d-a324-5de96bcf8379"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.617893 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data" (OuterVolumeSpecName: "config-data") pod "2616e008-314a-449d-a324-5de96bcf8379" (UID: "2616e008-314a-449d-a324-5de96bcf8379"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.652960 4797 generic.go:334] "Generic (PLEG): container finished" podID="2616e008-314a-449d-a324-5de96bcf8379" containerID="5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62" exitCode=0 Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.655250 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.655253 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerDied","Data":"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62"} Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.655383 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2616e008-314a-449d-a324-5de96bcf8379","Type":"ContainerDied","Data":"c54942f78807d773282f94c7fb89ad442019056de8e649e3452d8257a496a514"} Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.655410 4797 scope.go:117] "RemoveContainer" containerID="5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.670106 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "2616e008-314a-449d-a324-5de96bcf8379" (UID: "2616e008-314a-449d-a324-5de96bcf8379"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.686444 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.686507 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.686648 4797 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2616e008-314a-449d-a324-5de96bcf8379-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.686662 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2616e008-314a-449d-a324-5de96bcf8379-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.686672 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn96l\" (UniqueName: \"kubernetes.io/projected/2616e008-314a-449d-a324-5de96bcf8379-kube-api-access-rn96l\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.694867 4797 scope.go:117] "RemoveContainer" containerID="76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.720136 4797 scope.go:117] "RemoveContainer" containerID="5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62" Jan 04 12:11:50 crc kubenswrapper[4797]: E0104 12:11:50.720825 4797 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62\": container with ID starting with 5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62 not found: ID does not exist" containerID="5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.720865 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62"} err="failed to get container status \"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62\": rpc error: code = NotFound desc = could not find container \"5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62\": container with ID starting with 5ad4b313d0166f39234ccd69d78a80348944d4e35246b13db381b2b3fac38d62 not found: ID does not exist" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.720891 4797 scope.go:117] "RemoveContainer" containerID="76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c" Jan 04 12:11:50 crc kubenswrapper[4797]: E0104 12:11:50.722019 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c\": container with ID starting with 76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c not found: ID does not exist" containerID="76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.722044 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c"} err="failed to get container status \"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c\": rpc error: code = NotFound desc = could not find container \"76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c\": container with ID starting with 76fa4ca85f08a1a54dbedb907b8a78ea09b921564defa524a3f1ce241116a99c not found: ID does not exist" Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.932232 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:11:50 crc kubenswrapper[4797]: I0104 12:11:50.992723 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.014349 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.034300 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:51 crc kubenswrapper[4797]: E0104 12:11:51.034670 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.034685 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" Jan 04 12:11:51 crc kubenswrapper[4797]: E0104 12:11:51.034715 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.034721 4797 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.034883 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-log" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.034918 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2616e008-314a-449d-a324-5de96bcf8379" containerName="nova-metadata-metadata" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.035815 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.042743 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.042785 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.042952 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.195619 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w4db\" (UniqueName: \"kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.195920 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.195949 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.195966 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.196050 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.297898 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.298040 4797 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-5w4db\" (UniqueName: \"kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.298067 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.298090 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.298110 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.298543 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.304981 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.305276 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.306064 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.316757 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w4db\" (UniqueName: \"kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db\") pod \"nova-metadata-0\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.417858 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.496132 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2616e008-314a-449d-a324-5de96bcf8379" path="/var/lib/kubelet/pods/2616e008-314a-449d-a324-5de96bcf8379/volumes" Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.684220 4797 generic.go:334] "Generic (PLEG): container finished" podID="0f1445de-ef60-418e-ac68-6c16432ef283" containerID="190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c" exitCode=0 Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.684536 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerDied","Data":"190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c"} Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.684575 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerStarted","Data":"51e57e473e21e9a8ab3c45440479ebe4aabd6b55817582b1fc127b9746689670"} Jan 04 12:11:51 crc kubenswrapper[4797]: I0104 12:11:51.885595 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:51 crc kubenswrapper[4797]: W0104 12:11:51.890064 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5dd48a0b_cc19_4d03_9c3c_174d89f504c7.slice/crio-0ac1b0aba7d680563956c694f04467c4de6d473590db594edb1f534eb4764778 WatchSource:0}: Error finding container 0ac1b0aba7d680563956c694f04467c4de6d473590db594edb1f534eb4764778: Status 404 returned error can't find the container with id 0ac1b0aba7d680563956c694f04467c4de6d473590db594edb1f534eb4764778 Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.542103 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.696343 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerStarted","Data":"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.698261 4797 generic.go:334] "Generic (PLEG): container finished" podID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" exitCode=0 Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.698307 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6b0ee0c0-87fe-415c-acd9-fe2489411ea1","Type":"ContainerDied","Data":"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.698375 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6b0ee0c0-87fe-415c-acd9-fe2489411ea1","Type":"ContainerDied","Data":"8e33a2271ad8c549b0dc453bb191bc49a32ef82db8d1b0f8629381d395ab7864"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.698398 4797 scope.go:117] "RemoveContainer" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.698328 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.700562 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerStarted","Data":"b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.700619 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerStarted","Data":"d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.700630 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerStarted","Data":"0ac1b0aba7d680563956c694f04467c4de6d473590db594edb1f534eb4764778"} Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.718408 4797 scope.go:117] "RemoveContainer" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" Jan 04 12:11:52 crc kubenswrapper[4797]: E0104 12:11:52.718902 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70\": container with ID starting with 4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70 not found: ID does not exist" containerID="4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.718943 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70"} err="failed to get container status \"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70\": rpc error: code = NotFound desc = could not find container \"4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70\": container with ID starting with 4f0b7c4e7c27ef7415ab0394d82e15bab63b377d0788c753fc0d933847429b70 not found: ID does not exist" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.725901 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle\") pod \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.726054 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z874n\" (UniqueName: \"kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n\") pod \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.726074 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data\") pod \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\" (UID: \"6b0ee0c0-87fe-415c-acd9-fe2489411ea1\") " Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.730252 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n" (OuterVolumeSpecName: "kube-api-access-z874n") pod 
"6b0ee0c0-87fe-415c-acd9-fe2489411ea1" (UID: "6b0ee0c0-87fe-415c-acd9-fe2489411ea1"). InnerVolumeSpecName "kube-api-access-z874n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.758876 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data" (OuterVolumeSpecName: "config-data") pod "6b0ee0c0-87fe-415c-acd9-fe2489411ea1" (UID: "6b0ee0c0-87fe-415c-acd9-fe2489411ea1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.765488 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b0ee0c0-87fe-415c-acd9-fe2489411ea1" (UID: "6b0ee0c0-87fe-415c-acd9-fe2489411ea1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.828763 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z874n\" (UniqueName: \"kubernetes.io/projected/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-kube-api-access-z874n\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.828797 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:52 crc kubenswrapper[4797]: I0104 12:11:52.828810 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0ee0c0-87fe-415c-acd9-fe2489411ea1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.031195 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.031177933 podStartE2EDuration="3.031177933s" podCreationTimestamp="2026-01-04 12:11:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:52.741347818 +0000 UTC m=+1411.598534527" watchObservedRunningTime="2026-01-04 12:11:53.031177933 +0000 UTC m=+1411.888364642" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.033834 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.041305 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.058705 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:53 crc kubenswrapper[4797]: E0104 12:11:53.059104 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerName="nova-scheduler-scheduler" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.059122 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerName="nova-scheduler-scheduler" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.059306 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" containerName="nova-scheduler-scheduler" Jan 04 12:11:53 crc kubenswrapper[4797]: 
I0104 12:11:53.059881 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.065035 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.081249 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.236516 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.236605 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.236642 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fdjj\" (UniqueName: \"kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.338578 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.338672 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fdjj\" (UniqueName: \"kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.338837 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.345236 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.349504 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.359935 4797 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2fdjj\" (UniqueName: \"kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj\") pod \"nova-scheduler-0\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.385941 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:53 crc kubenswrapper[4797]: I0104 12:11:53.496782 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b0ee0c0-87fe-415c-acd9-fe2489411ea1" path="/var/lib/kubelet/pods/6b0ee0c0-87fe-415c-acd9-fe2489411ea1/volumes" Jan 04 12:11:55 crc kubenswrapper[4797]: I0104 12:11:53.731233 4797 generic.go:334] "Generic (PLEG): container finished" podID="0f1445de-ef60-418e-ac68-6c16432ef283" containerID="eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe" exitCode=0 Jan 04 12:11:55 crc kubenswrapper[4797]: I0104 12:11:53.731300 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerDied","Data":"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe"} Jan 04 12:11:55 crc kubenswrapper[4797]: I0104 12:11:55.757395 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerStarted","Data":"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f"} Jan 04 12:11:55 crc kubenswrapper[4797]: I0104 12:11:55.778790 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xb9sl" podStartSLOduration=2.181007919 podStartE2EDuration="5.778763057s" podCreationTimestamp="2026-01-04 12:11:50 +0000 UTC" firstStartedPulling="2026-01-04 12:11:51.693397128 +0000 UTC m=+1410.550583837" lastFinishedPulling="2026-01-04 12:11:55.291152266 +0000 UTC m=+1414.148338975" observedRunningTime="2026-01-04 12:11:55.771861196 +0000 UTC m=+1414.629047915" watchObservedRunningTime="2026-01-04 12:11:55.778763057 +0000 UTC m=+1414.635949806" Jan 04 12:11:55 crc kubenswrapper[4797]: I0104 12:11:55.873035 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:55 crc kubenswrapper[4797]: W0104 12:11:55.885272 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b79ca4c_dde4_4027_b779_ba762e22cb3a.slice/crio-0ed070d1deff0ca1cc23363530db2a00a84ad80d8f09f04afc4fc0d1c796f5c8 WatchSource:0}: Error finding container 0ed070d1deff0ca1cc23363530db2a00a84ad80d8f09f04afc4fc0d1c796f5c8: Status 404 returned error can't find the container with id 0ed070d1deff0ca1cc23363530db2a00a84ad80d8f09f04afc4fc0d1c796f5c8 Jan 04 12:11:56 crc kubenswrapper[4797]: I0104 12:11:56.418376 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:11:56 crc kubenswrapper[4797]: I0104 12:11:56.418693 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:11:56 crc kubenswrapper[4797]: I0104 12:11:56.769745 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9b79ca4c-dde4-4027-b779-ba762e22cb3a","Type":"ContainerStarted","Data":"e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc"} Jan 04 12:11:56 crc 
kubenswrapper[4797]: I0104 12:11:56.769800 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9b79ca4c-dde4-4027-b779-ba762e22cb3a","Type":"ContainerStarted","Data":"0ed070d1deff0ca1cc23363530db2a00a84ad80d8f09f04afc4fc0d1c796f5c8"} Jan 04 12:11:56 crc kubenswrapper[4797]: I0104 12:11:56.799381 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.79936068 podStartE2EDuration="3.79936068s" podCreationTimestamp="2026-01-04 12:11:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:56.79048359 +0000 UTC m=+1415.647670309" watchObservedRunningTime="2026-01-04 12:11:56.79936068 +0000 UTC m=+1415.656547409" Jan 04 12:11:58 crc kubenswrapper[4797]: I0104 12:11:58.295274 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:58 crc kubenswrapper[4797]: I0104 12:11:58.295606 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:58 crc kubenswrapper[4797]: I0104 12:11:58.386032 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 04 12:11:59 crc kubenswrapper[4797]: I0104 12:11:59.320159 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:11:59 crc kubenswrapper[4797]: I0104 12:11:59.320234 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:00 crc kubenswrapper[4797]: I0104 12:12:00.436128 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:00 crc kubenswrapper[4797]: I0104 12:12:00.436641 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:01 crc kubenswrapper[4797]: I0104 12:12:01.418804 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 04 12:12:01 crc kubenswrapper[4797]: I0104 12:12:01.418909 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 04 12:12:01 crc kubenswrapper[4797]: I0104 12:12:01.514738 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xb9sl" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="registry-server" probeResult="failure" output=< Jan 04 12:12:01 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s Jan 04 12:12:01 crc kubenswrapper[4797]: > Jan 04 12:12:02 crc kubenswrapper[4797]: I0104 12:12:02.436963 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while 
awaiting headers)" Jan 04 12:12:02 crc kubenswrapper[4797]: I0104 12:12:02.436945 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:03 crc kubenswrapper[4797]: I0104 12:12:03.386351 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 04 12:12:03 crc kubenswrapper[4797]: I0104 12:12:03.418682 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 04 12:12:03 crc kubenswrapper[4797]: I0104 12:12:03.876623 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.305783 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.306743 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.309102 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.316234 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.916391 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.926098 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[4797]: I0104 12:12:08.994085 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 04 12:12:10 crc kubenswrapper[4797]: I0104 12:12:10.506932 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:10 crc kubenswrapper[4797]: I0104 12:12:10.570117 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:10 crc kubenswrapper[4797]: I0104 12:12:10.772576 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:12:11 crc kubenswrapper[4797]: I0104 12:12:11.424103 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:11 crc kubenswrapper[4797]: I0104 12:12:11.424728 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:11 crc kubenswrapper[4797]: I0104 12:12:11.429423 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:11 crc kubenswrapper[4797]: I0104 12:12:11.941135 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xb9sl" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="registry-server" containerID="cri-o://8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f" gracePeriod=2 Jan 04 12:12:11 crc kubenswrapper[4797]: I0104 12:12:11.947861 4797 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.397707 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.525691 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content\") pod \"0f1445de-ef60-418e-ac68-6c16432ef283\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.525764 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities\") pod \"0f1445de-ef60-418e-ac68-6c16432ef283\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.525890 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgh6j\" (UniqueName: \"kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j\") pod \"0f1445de-ef60-418e-ac68-6c16432ef283\" (UID: \"0f1445de-ef60-418e-ac68-6c16432ef283\") " Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.526521 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities" (OuterVolumeSpecName: "utilities") pod "0f1445de-ef60-418e-ac68-6c16432ef283" (UID: "0f1445de-ef60-418e-ac68-6c16432ef283"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.539141 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j" (OuterVolumeSpecName: "kube-api-access-hgh6j") pod "0f1445de-ef60-418e-ac68-6c16432ef283" (UID: "0f1445de-ef60-418e-ac68-6c16432ef283"). InnerVolumeSpecName "kube-api-access-hgh6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.628744 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.628783 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgh6j\" (UniqueName: \"kubernetes.io/projected/0f1445de-ef60-418e-ac68-6c16432ef283-kube-api-access-hgh6j\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.668126 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f1445de-ef60-418e-ac68-6c16432ef283" (UID: "0f1445de-ef60-418e-ac68-6c16432ef283"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.729885 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f1445de-ef60-418e-ac68-6c16432ef283-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.955707 4797 generic.go:334] "Generic (PLEG): container finished" podID="0f1445de-ef60-418e-ac68-6c16432ef283" containerID="8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f" exitCode=0 Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.955818 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerDied","Data":"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f"} Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.955894 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xb9sl" event={"ID":"0f1445de-ef60-418e-ac68-6c16432ef283","Type":"ContainerDied","Data":"51e57e473e21e9a8ab3c45440479ebe4aabd6b55817582b1fc127b9746689670"} Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.955925 4797 scope.go:117] "RemoveContainer" containerID="8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.955838 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xb9sl" Jan 04 12:12:12 crc kubenswrapper[4797]: I0104 12:12:12.999439 4797 scope.go:117] "RemoveContainer" containerID="eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.022585 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.033597 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xb9sl"] Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.039110 4797 scope.go:117] "RemoveContainer" containerID="190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.089038 4797 scope.go:117] "RemoveContainer" containerID="8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f" Jan 04 12:12:13 crc kubenswrapper[4797]: E0104 12:12:13.089594 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f\": container with ID starting with 8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f not found: ID does not exist" containerID="8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.089666 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f"} err="failed to get container status \"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f\": rpc error: code = NotFound desc = could not find container \"8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f\": container with ID starting with 8afb99988a368eac5d306b5fb54071360cd342757c11b13da9e15e610e11bb2f not found: ID does not exist" Jan 04 12:12:13 crc 
kubenswrapper[4797]: I0104 12:12:13.089709 4797 scope.go:117] "RemoveContainer" containerID="eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe" Jan 04 12:12:13 crc kubenswrapper[4797]: E0104 12:12:13.090460 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe\": container with ID starting with eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe not found: ID does not exist" containerID="eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.090511 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe"} err="failed to get container status \"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe\": rpc error: code = NotFound desc = could not find container \"eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe\": container with ID starting with eb564c68287eb645b37a27350ddfd035ed7234401e100d6a46c26a7e68462ffe not found: ID does not exist" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.090539 4797 scope.go:117] "RemoveContainer" containerID="190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c" Jan 04 12:12:13 crc kubenswrapper[4797]: E0104 12:12:13.090862 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c\": container with ID starting with 190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c not found: ID does not exist" containerID="190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.090925 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c"} err="failed to get container status \"190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c\": rpc error: code = NotFound desc = could not find container \"190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c\": container with ID starting with 190735e6959a3576504ff278377684fe85f4adcc4a0d45c26a2bfadafb36ba2c not found: ID does not exist" Jan 04 12:12:13 crc kubenswrapper[4797]: I0104 12:12:13.484405 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" path="/var/lib/kubelet/pods/0f1445de-ef60-418e-ac68-6c16432ef283/volumes" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.993267 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:30 crc kubenswrapper[4797]: E0104 12:12:30.994330 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="extract-content" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.994349 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="extract-content" Jan 04 12:12:30 crc kubenswrapper[4797]: E0104 12:12:30.994382 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="extract-utilities" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.994391 4797 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="extract-utilities" Jan 04 12:12:30 crc kubenswrapper[4797]: E0104 12:12:30.994412 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="registry-server" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.994419 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="registry-server" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.994624 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f1445de-ef60-418e-ac68-6c16432ef283" containerName="registry-server" Jan 04 12:12:30 crc kubenswrapper[4797]: I0104 12:12:30.995397 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.013497 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.013822 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm46n\" (UniqueName: \"kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.013932 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.050452 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.115658 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.115761 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm46n\" (UniqueName: \"kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.116470 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.168693 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jm46n\" (UniqueName: \"kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n\") pod \"neutron-e050-account-create-update-kbbx4\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.254904 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e050-account-create-update-5j9cp"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.276240 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e050-account-create-update-5j9cp"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.311637 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.385313 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-dg6d4"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.409838 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.422617 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkdmd\" (UniqueName: \"kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd\") pod \"root-account-create-update-dg6d4\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.422823 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts\") pod \"root-account-create-update-dg6d4\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.424704 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-dg6d4"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.449198 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.531058 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a91a3dfc-cd1e-42e4-b40b-c3f18292b523" path="/var/lib/kubelet/pods/a91a3dfc-cd1e-42e4-b40b-c3f18292b523/volumes" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.531991 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c3c0-account-create-update-846gd"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.540743 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts\") pod \"root-account-create-update-dg6d4\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.540851 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkdmd\" (UniqueName: \"kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd\") pod \"root-account-create-update-dg6d4\" (UID: 
\"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.542774 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts\") pod \"root-account-create-update-dg6d4\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.581154 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.643649 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.643681 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.643693 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-5746959b69-brph4: [secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.643738 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift podName:c044e46d-b79e-4f22-be2d-98408745d63a nodeName:}" failed. No retries permitted until 2026-01-04 12:12:32.143720992 +0000 UTC m=+1451.000907701 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift") pod "swift-proxy-5746959b69-brph4" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a") : [secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.673608 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkdmd\" (UniqueName: \"kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd\") pod \"root-account-create-update-dg6d4\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.715590 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c3c0-account-create-update-846gd"] Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.747131 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.747208 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data podName:1414255a-a94a-4508-aa55-4ad9837afbea nodeName:}" failed. No retries permitted until 2026-01-04 12:12:32.247189013 +0000 UTC m=+1451.104375722 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data") pod "rabbitmq-server-0" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea") : configmap "rabbitmq-config-data" not found Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.771087 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.771339 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="ea5de376-7b66-40d0-8565-c8a34961540b" containerName="openstackclient" containerID="cri-o://a95bc6ae38195a799c2eec6d7e3b1fc38748ec8ad711d67378d10d8eb96a34a6" gracePeriod=2 Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.773092 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.815713 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.824063 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-gc7cq"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.882041 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-gc7cq"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.900078 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.900341 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="ovn-northd" containerID="cri-o://58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" gracePeriod=30 Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.900461 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="openstack-network-exporter" containerID="cri-o://b2a3aa87e9987ca244304550b8c7cb9d2bc8fd403c42e8b33895b09bf3e9a6f0" gracePeriod=30 Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.910401 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"] Jan 04 12:12:31 crc kubenswrapper[4797]: E0104 12:12:31.910851 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5de376-7b66-40d0-8565-c8a34961540b" containerName="openstackclient" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.910863 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5de376-7b66-40d0-8565-c8a34961540b" containerName="openstackclient" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.911067 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5de376-7b66-40d0-8565-c8a34961540b" containerName="openstackclient" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.911634 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.927079 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.927472 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="openstack-network-exporter" containerID="cri-o://ab503fd970b32577760bbe5ec35f3c0df3a184059f20a5bf6e5b7c34bf2d9638" gracePeriod=300 Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.941842 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7dbzx"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.948774 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.957013 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9lr6\" (UniqueName: \"kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.957091 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.959744 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"] Jan 04 12:12:31 crc kubenswrapper[4797]: I0104 12:12:31.988644 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7dbzx"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.007943 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0a7e-account-create-update-kv964"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.024630 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-0a7e-account-create-update-kv964"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.061249 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-2hjqq"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.066158 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9lr6\" (UniqueName: \"kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.066228 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 
12:12:32.067205 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.084036 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-2hjqq"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.097232 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4q6pn"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.105328 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9lr6\" (UniqueName: \"kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6\") pod \"placement-0a7e-account-create-update-7g9mc\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.133701 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="ovsdbserver-nb" containerID="cri-o://d3af461d9e11838f802cfe0a7de35c8ace652dcb15039a7ab0301617ec90e7f2" gracePeriod=300 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.134907 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4q6pn"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.170090 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-qc984"] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.187638 4797 projected.go:263] Couldn't get secret openstack/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.187679 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.187689 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.187703 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-5746959b69-brph4: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.187748 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift podName:c044e46d-b79e-4f22-be2d-98408745d63a nodeName:}" failed. No retries permitted until 2026-01-04 12:12:33.187732908 +0000 UTC m=+1452.044919617 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift") pod "swift-proxy-5746959b69-brph4" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.242532 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.246298 4797 generic.go:334] "Generic (PLEG): container finished" podID="9354793e-2888-4fba-a93a-841077bd7270" containerID="ab503fd970b32577760bbe5ec35f3c0df3a184059f20a5bf6e5b7c34bf2d9638" exitCode=2 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.246363 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerDied","Data":"ab503fd970b32577760bbe5ec35f3c0df3a184059f20a5bf6e5b7c34bf2d9638"} Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.260940 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-qc984"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.261075 4797 generic.go:334] "Generic (PLEG): container finished" podID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerID="b2a3aa87e9987ca244304550b8c7cb9d2bc8fd403c42e8b33895b09bf3e9a6f0" exitCode=2 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.261101 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerDied","Data":"b2a3aa87e9987ca244304550b8c7cb9d2bc8fd403c42e8b33895b09bf3e9a6f0"} Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.271672 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.287454 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.287596 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data podName:1414255a-a94a-4508-aa55-4ad9837afbea nodeName:}" failed. No retries permitted until 2026-01-04 12:12:33.287578859 +0000 UTC m=+1452.144765568 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data") pod "rabbitmq-server-0" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea") : configmap "rabbitmq-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.302093 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-efba-account-create-update-mvjk5"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.309895 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-efba-account-create-update-mvjk5"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.317470 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.318010 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="openstack-network-exporter" containerID="cri-o://876e9084cf30c1be04d840130d0d0fc76c012572a641f8f762449c931b2c2a7b" gracePeriod=300 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.335286 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-fc00-account-create-update-nssh5"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.349386 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-lnngq"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.358429 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-ppk5k"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.389626 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-fc00-account-create-update-nssh5"] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.391174 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.391221 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data podName:d5065e47-366d-4fc9-9acb-f7691489b27d nodeName:}" failed. No retries permitted until 2026-01-04 12:12:32.891207674 +0000 UTC m=+1451.748394383 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data") pod "rabbitmq-cell1-server-0" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.419781 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-lnngq"] Jan 04 12:12:32 crc kubenswrapper[4797]: W0104 12:12:32.423448 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6693e0d_f1c0_49c2_8901_4eb6d7d56681.slice/crio-3c5058a20316c589a657833302ddd78f24b977d4ca6aa28c5b937a6b3bb069ac WatchSource:0}: Error finding container 3c5058a20316c589a657833302ddd78f24b977d4ca6aa28c5b937a6b3bb069ac: Status 404 returned error can't find the container with id 3c5058a20316c589a657833302ddd78f24b977d4ca6aa28c5b937a6b3bb069ac Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.436049 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-ppk5k"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.457906 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="ovsdbserver-sb" containerID="cri-o://1b47902615c0883c42301d21eb8e7a3ecf78720d4c51636bcc79479f16c32b75" gracePeriod=300 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.458410 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lhm69"] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.459209 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:32 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: if [ -n "neutron" ]; then Jan 04 12:12:32 crc kubenswrapper[4797]: GRANT_DATABASE="neutron" Jan 04 12:12:32 crc kubenswrapper[4797]: else Jan 04 12:12:32 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:32 crc kubenswrapper[4797]: fi Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:32 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:32 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:32 crc kubenswrapper[4797]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:32 crc kubenswrapper[4797]: # support updates Jan 04 12:12:32 crc kubenswrapper[4797]: Jan 04 12:12:32 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.460753 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-e050-account-create-update-kbbx4" podUID="b6693e0d-f1c0-49c2-8901-4eb6d7d56681" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.474841 4797 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/swift-storage-0" secret="" err="secret \"swift-swift-dockercfg-5tp46\" not found" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.485895 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.557135 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lhm69"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.633651 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.648638 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.648910 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="cinder-scheduler" containerID="cri-o://1105f111e259358a7b341195cb86cefe7de4dadda4bb97857fb143d53d1d387e" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.649061 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="probe" containerID="cri-o://1881930c5a9519ddc49465313777fe56cca8210c13be4a63ce3f9a3424e5e91c" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.686733 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-txsr9"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.686944 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-txsr9" podUID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" containerName="openstack-network-exporter" containerID="cri-o://db595fec7696fae4c7b3b7368a9d2399b874117452a45b57928c938fea8c4220" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.698054 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8lx8k"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.705184 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.705415 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-56b94d8bbf-ng2pk" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-api" containerID="cri-o://8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.705504 4797 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-56b94d8bbf-ng2pk" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-httpd" containerID="cri-o://1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.714194 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.714222 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.714230 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.714240 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.714290 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:33.214275341 +0000 UTC m=+1452.071462050 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.722053 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.723122 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-log" containerID="cri-o://180cd379c89ba5c47f18c6ef1c23999a1a2f27dccb309fa51f167df314999171" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.723281 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-httpd" containerID="cri-o://12a99c3f2374bcf465e4dea1ca963d10cfbd969365c68cb800d0058e6b3033aa" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.735303 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.735554 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api-log" containerID="cri-o://9a78a250903e07d598e88432420c861a2b78d03523ca1f130ae64e7cae2c30a0" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.735572 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api" containerID="cri-o://82da6920fae7841a168988c98103f1142bc6deaa632d2c919def4424335d4556" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.746398 4797 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.755272 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.755528 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="dnsmasq-dns" containerID="cri-o://1e8c12d06a30d46110e5c3864a386ee61f29f7a73cce977df15092fc9de6dac5" gracePeriod=10 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.771820 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.159:8776/healthcheck\": EOF" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.771944 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.159:8776/healthcheck\": EOF" Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.902487 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.903034 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-log" containerID="cri-o://dc4a21b1db6be2e07ca76bdb6c4c04513e5342187c0ece820874f1a86da403bc" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.903518 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-httpd" containerID="cri-o://195feab2fc5a30a29388929e5db7f342ff1fd19485ed5d54f1b0daa3a22494a5" gracePeriod=30 Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.931682 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: E0104 12:12:32.931753 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data podName:d5065e47-366d-4fc9-9acb-f7691489b27d nodeName:}" failed. No retries permitted until 2026-01-04 12:12:33.931731994 +0000 UTC m=+1452.788918703 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data") pod "rabbitmq-cell1-server-0" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.944211 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9070-account-create-update-tf4w9"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.951347 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-zr4cn"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.979395 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9070-account-create-update-tf4w9"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.987777 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-zr4cn"] Jan 04 12:12:32 crc kubenswrapper[4797]: I0104 12:12:32.994432 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-d29k2"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.004383 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-d29k2"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.014338 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.014601 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-596bb9655b-hsz7j" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-log" containerID="cri-o://b36b3a17559f71b1c9cfe74bbb4df7e910711b7ff1898a03c491af436d97b4ee" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.014949 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-596bb9655b-hsz7j" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-api" containerID="cri-o://92b755c8a3261041cdba8ae7ff1475f229ccf920b10c9c7daf3c52db8f65c7c8" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.033362 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-698r7"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.061839 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-698r7"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.082181 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.082406 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5746959b69-brph4" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-httpd" containerID="cri-o://c24b57917dd5aa7e5ce4a5adee1907c75b34a49d88fc6eb6757c983dec5cfd3a" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.082756 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5746959b69-brph4" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-server" containerID="cri-o://b817e46fd4e00fefeca15f786375e9089980050aceb8fad138a6fef75a80c940" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.090200 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4447-account-create-update-k6c6m"] Jan 04 
12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.104289 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4447-account-create-update-k6c6m"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.124527 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.139713 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.189161 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rw479"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.215123 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rw479"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.223929 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.224142 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener-log" containerID="cri-o://fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.224564 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener" containerID="cri-o://6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.238995 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.239209 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-log" containerID="cri-o://4ffd1060deed2e4837e76d50f06b6b55d7a3082c39494946544e96c70503bbad" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.239580 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-api" containerID="cri-o://b3e1a2411d4524d7382c2ff29d5707c5d826d0551904e8a9f2a1500c0fabd198" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242327 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242358 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242369 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242379 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242416 4797 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:34.242403485 +0000 UTC m=+1453.099590194 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242459 4797 projected.go:263] Couldn't get secret openstack/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242469 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242476 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242483 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-5746959b69-brph4: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.242506 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift podName:c044e46d-b79e-4f22-be2d-98408745d63a nodeName:}" failed. No retries permitted until 2026-01-04 12:12:35.242498707 +0000 UTC m=+1454.099685416 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift") pod "swift-proxy-5746959b69-brph4" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:33 crc kubenswrapper[4797]: W0104 12:12:33.248694 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fc7d678_c699_4348_9654_5290b2d48bd3.slice/crio-3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae WatchSource:0}: Error finding container 3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae: Status 404 returned error can't find the container with id 3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.252579 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.252960 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" containerID="cri-o://d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.253067 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" containerID="cri-o://b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.253683 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: if [ -n "" ]; then Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="" Jan 04 12:12:33 crc kubenswrapper[4797]: else Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:33 crc kubenswrapper[4797]: fi Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:33 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:33 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:33 crc kubenswrapper[4797]: # 3. 
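
Each "etc-swift" failure above is the same projected volume refusing to materialize because one or more of its sources is missing; note the retry delay doubling across attempts (500ms, then 1s, then 2s). A hypothetical triage session against the objects named in these messages:

    # Hypothetical triage for the etc-swift SetUp failures; names are taken
    # verbatim from the log messages above.
    oc -n openstack get configmap swift-storage-config-data swift-ring-files
    oc -n openstack get secret swift-conf swift-proxy-config-data
    # A projected volume only mounts once every listed source exists; one
    # missing configmap or secret fails the whole volume.
    oc -n openstack get pod swift-storage-0 \
        -o jsonpath='{.spec.volumes[?(@.name=="etc-swift")].projected.sources}'

Given the surrounding flood of SyncLoop DELETE events, these objects were most likely removed as part of namespace teardown, so the mount failures here look like deletion-ordering noise rather than a standalone fault.
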
Jan 04 12:12:33 crc kubenswrapper[4797]: W0104 12:12:33.248694 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fc7d678_c699_4348_9654_5290b2d48bd3.slice/crio-3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae WatchSource:0}: Error finding container 3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae: Status 404 returned error can't find the container with id 3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.252579 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.252960 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" containerID="cri-o://d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.253067 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" containerID="cri-o://b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.253683 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 04 12:12:33 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: if [ -n "" ]; then
Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE=""
Jan 04 12:12:33 crc kubenswrapper[4797]: else
Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="*"
Jan 04 12:12:33 crc kubenswrapper[4797]: fi
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: # going for maximum compatibility here:
Jan 04 12:12:33 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 04 12:12:33 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 04 12:12:33 crc kubenswrapper[4797]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 04 12:12:33 crc kubenswrapper[4797]: # support updates
Jan 04 12:12:33 crc kubenswrapper[4797]:
Jan 04 12:12:33 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError"
Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.255863 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-dg6d4" podUID="9fc7d678-c699-4348-9654-5290b2d48bd3"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.267438 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="rabbitmq" containerID="cri-o://47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9" gracePeriod=604800
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.271523 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lbhjz"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.282121 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lbhjz"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.292024 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.292276 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c675d9b9b-9fg4r" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api-log" containerID="cri-o://3e3a15f869a29fb29c3c3b83c8c750fb50a7d3d3675123fde9492287428afb82" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.292680 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c675d9b9b-9fg4r" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api" containerID="cri-o://90b02fc9419aacb467bc917b82b3cf36fa359aa8f7e3da9a5e9dd0d2acd2ff8b" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.297922 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.303977 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"]
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.304239 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-6bfdff7977-xqk4d" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker-log" containerID="cri-o://fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.304376 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-6bfdff7977-xqk4d" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker" containerID="cri-o://ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.334148 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"]
Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.344808 4797 configmap.go:193] Couldn't get configMap
openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.344865 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data podName:1414255a-a94a-4508-aa55-4ad9837afbea nodeName:}" failed. No retries permitted until 2026-01-04 12:12:35.344851631 +0000 UTC m=+1454.202038340 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data") pod "rabbitmq-server-0" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea") : configmap "rabbitmq-config-data" not found Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.354499 4797 generic.go:334] "Generic (PLEG): container finished" podID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerID="180cd379c89ba5c47f18c6ef1c23999a1a2f27dccb309fa51f167df314999171" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.354677 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerDied","Data":"180cd379c89ba5c47f18c6ef1c23999a1a2f27dccb309fa51f167df314999171"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.366509 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-txsr9_a305d4e5-d5e2-4bac-85ec-568c06b92b98/openstack-network-exporter/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.366541 4797 generic.go:334] "Generic (PLEG): container finished" podID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" containerID="db595fec7696fae4c7b3b7368a9d2399b874117452a45b57928c938fea8c4220" exitCode=2 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.366579 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-txsr9" event={"ID":"a305d4e5-d5e2-4bac-85ec-568c06b92b98","Type":"ContainerDied","Data":"db595fec7696fae4c7b3b7368a9d2399b874117452a45b57928c938fea8c4220"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.382118 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-tlmxr"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.384103 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9354793e-2888-4fba-a93a-841077bd7270/ovsdbserver-nb/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.384140 4797 generic.go:334] "Generic (PLEG): container finished" podID="9354793e-2888-4fba-a93a-841077bd7270" containerID="d3af461d9e11838f802cfe0a7de35c8ace652dcb15039a7ab0301617ec90e7f2" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.384198 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerDied","Data":"d3af461d9e11838f802cfe0a7de35c8ace652dcb15039a7ab0301617ec90e7f2"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.384218 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9354793e-2888-4fba-a93a-841077bd7270","Type":"ContainerDied","Data":"d1315ae8a058420bc7be271bbc4e77777c76d634265824c96d8d296c973a410a"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.384229 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1315ae8a058420bc7be271bbc4e77777c76d634265824c96d8d296c973a410a" Jan 04 12:12:33 
crc kubenswrapper[4797]: I0104 12:12:33.388890 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-tlmxr"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.391307 4797 generic.go:334] "Generic (PLEG): container finished" podID="f904f7de-5407-4427-a82c-e31b26195c0a" containerID="b36b3a17559f71b1c9cfe74bbb4df7e910711b7ff1898a03c491af436d97b4ee" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.391353 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerDied","Data":"b36b3a17559f71b1c9cfe74bbb4df7e910711b7ff1898a03c491af436d97b4ee"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.394518 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8d59-account-create-update-jf95g"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.403070 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-76htz"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.431644 4797 generic.go:334] "Generic (PLEG): container finished" podID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerID="1e8c12d06a30d46110e5c3864a386ee61f29f7a73cce977df15092fc9de6dac5" exitCode=0 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.431823 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8d59-account-create-update-jf95g"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.431851 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" event={"ID":"8d097117-3af5-47a7-bcbd-ad887d0972df","Type":"ContainerDied","Data":"1e8c12d06a30d46110e5c3864a386ee61f29f7a73cce977df15092fc9de6dac5"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.449059 4797 generic.go:334] "Generic (PLEG): container finished" podID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerID="1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423" exitCode=0 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.449124 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerDied","Data":"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.456971 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-kbbx4" event={"ID":"b6693e0d-f1c0-49c2-8901-4eb6d7d56681","Type":"ContainerStarted","Data":"3c5058a20316c589a657833302ddd78f24b977d4ca6aa28c5b937a6b3bb069ac"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.464649 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2d84926f-3521-47ed-9581-a7beb6762e06/ovsdbserver-sb/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.464699 4797 generic.go:334] "Generic (PLEG): container finished" podID="2d84926f-3521-47ed-9581-a7beb6762e06" containerID="876e9084cf30c1be04d840130d0d0fc76c012572a641f8f762449c931b2c2a7b" exitCode=2 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.464721 4797 generic.go:334] "Generic (PLEG): container finished" podID="2d84926f-3521-47ed-9581-a7beb6762e06" containerID="1b47902615c0883c42301d21eb8e7a3ecf78720d4c51636bcc79479f16c32b75" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.464786 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerDied","Data":"876e9084cf30c1be04d840130d0d0fc76c012572a641f8f762449c931b2c2a7b"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.464812 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerDied","Data":"1b47902615c0883c42301d21eb8e7a3ecf78720d4c51636bcc79479f16c32b75"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.468430 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dg6d4" event={"ID":"9fc7d678-c699-4348-9654-5290b2d48bd3","Type":"ContainerStarted","Data":"3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.469740 4797 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/root-account-create-update-dg6d4" secret="" err="secret \"galera-openstack-cell1-dockercfg-rdc6f\" not found" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.473411 4797 generic.go:334] "Generic (PLEG): container finished" podID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerID="dc4a21b1db6be2e07ca76bdb6c4c04513e5342187c0ece820874f1a86da403bc" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.476418 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: if [ -n "neutron" ]; then Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="neutron" Jan 04 12:12:33 crc kubenswrapper[4797]: else Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:33 crc kubenswrapper[4797]: fi Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:33 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:33 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:33 crc kubenswrapper[4797]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:33 crc kubenswrapper[4797]: # support updates Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.478600 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-e050-account-create-update-kbbx4" podUID="b6693e0d-f1c0-49c2-8901-4eb6d7d56681" Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.490141 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: if [ -n "" ]; then Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="" Jan 04 12:12:33 crc kubenswrapper[4797]: else Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:33 crc kubenswrapper[4797]: fi Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:33 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:33 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:33 crc kubenswrapper[4797]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:33 crc kubenswrapper[4797]: # support updates Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.491775 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-dg6d4" podUID="9fc7d678-c699-4348-9654-5290b2d48bd3" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.505016 4797 generic.go:334] "Generic (PLEG): container finished" podID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerID="9a78a250903e07d598e88432420c861a2b78d03523ca1f130ae64e7cae2c30a0" exitCode=143 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.505980 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-server" containerID="cri-o://3878ed80aaf358279c48ab4d9c9c529e7175864a9d22b44343cee0f365760367" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506611 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-server" containerID="cri-o://d94828be090a9c92168c4bc7043848fc90bb1e6e64c5c3b90561fa797b2111c6" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506718 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-updater" containerID="cri-o://2706f017a019b29eb56e1869f628c26c7a0403d0ac794f7452caa2d980a884e3" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506748 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="swift-recon-cron" containerID="cri-o://70f1225e794568121e45dff575941e5d35fc27a1defcff2979df065f4ecbaf37" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506836 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="rsync" containerID="cri-o://2ddf43a128cda79487c737e8b808ccee520d60c4184785332b0d190c687e82c4" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506864 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-auditor" containerID="cri-o://9d917f5809cfdb7d3e5560e391457ae4c770a5118b5655d11d4280c6634d5e65" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506942 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-expirer" containerID="cri-o://983ab368fd1a7cc5f480857b432d0a3e9e25e3576b0af3b3338d273ed9c26b17" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.506949 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" 
containerName="container-replicator" containerID="cri-o://2b82ee45235bd2492b8f0fbdeb7b02c90e80f762e2a643a80221e2da952c67c8" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507044 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-updater" containerID="cri-o://4d0acf805d72a971037b409b90ec01a6ebbc5d590e92c591c625d2fa707db99c" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507115 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-reaper" containerID="cri-o://99d34e3c79b062431c9d84a0e920a2cea64a5e8ddf3dd8c6b4b199964f36fd85" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507194 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-auditor" containerID="cri-o://95970be1d420d961aa5faba4feae52bf2847295f0bff54ad0ab5ecfc128f1139" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507210 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-auditor" containerID="cri-o://6434cfdff0caacbdc87686c3fccdc177be05f4195a31c1b2f6bbbfe1abc8fc18" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507283 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-replicator" containerID="cri-o://f92a49d431c52702876723a17e42bd64b37bc0d9cd0421a5b6941ec41e2dc6b7" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507299 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-replicator" containerID="cri-o://dac8160cf70dc4b847fed968eb4544e73150e080abe8ddac5ed8f69951612687" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.507044 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-server" containerID="cri-o://4bd8a0778e29226183846c723b3b1dbfb7b51f65f48a65e13a57c6521dc2d967" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.519907 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" containerID="cri-o://d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.600323 4797 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:12:33 crc kubenswrapper[4797]: + source /usr/local/bin/container-scripts/functions Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNBridge=br-int Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:12:33 crc kubenswrapper[4797]: ++ 
OVNEncapType=geneve Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNAvailabilityZones= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ EnableChassisAsGateway=true Jan 04 12:12:33 crc kubenswrapper[4797]: ++ PhysicalNetworks= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNHostName= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:12:33 crc kubenswrapper[4797]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:12:33 crc kubenswrapper[4797]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:12:33 crc kubenswrapper[4797]: + sleep 0.5 Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:12:33 crc kubenswrapper[4797]: + cleanup_ovsdb_server_semaphore Jan 04 12:12:33 crc kubenswrapper[4797]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:12:33 crc kubenswrapper[4797]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 04 12:12:33 crc kubenswrapper[4797]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-2ft9n" message=< Jan 04 12:12:33 crc kubenswrapper[4797]: Exiting ovsdb-server (5) [ OK ] Jan 04 12:12:33 crc kubenswrapper[4797]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:12:33 crc kubenswrapper[4797]: + source /usr/local/bin/container-scripts/functions Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNBridge=br-int Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNEncapType=geneve Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNAvailabilityZones= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ EnableChassisAsGateway=true Jan 04 12:12:33 crc kubenswrapper[4797]: ++ PhysicalNetworks= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNHostName= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:12:33 crc kubenswrapper[4797]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:12:33 crc kubenswrapper[4797]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:12:33 crc kubenswrapper[4797]: + sleep 0.5 Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:12:33 crc kubenswrapper[4797]: + cleanup_ovsdb_server_semaphore Jan 04 12:12:33 crc kubenswrapper[4797]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:12:33 crc kubenswrapper[4797]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 04 12:12:33 crc kubenswrapper[4797]: > Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.622392 4797 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:12:33 crc kubenswrapper[4797]: + source /usr/local/bin/container-scripts/functions Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNBridge=br-int Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNEncapType=geneve Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNAvailabilityZones= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ EnableChassisAsGateway=true Jan 04 12:12:33 crc kubenswrapper[4797]: ++ PhysicalNetworks= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ OVNHostName= Jan 04 12:12:33 crc kubenswrapper[4797]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:12:33 crc kubenswrapper[4797]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:12:33 crc kubenswrapper[4797]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:12:33 crc kubenswrapper[4797]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:12:33 crc kubenswrapper[4797]: + sleep 0.5 Jan 04 12:12:33 crc kubenswrapper[4797]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Jan 04 12:12:33 crc kubenswrapper[4797]: + cleanup_ovsdb_server_semaphore
Jan 04 12:12:33 crc kubenswrapper[4797]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Jan 04 12:12:33 crc kubenswrapper[4797]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Jan 04 12:12:33 crc kubenswrapper[4797]: > pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" containerID="cri-o://07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.622464 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" containerID="cri-o://07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" gracePeriod=30
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.628284 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="070ef98c-992d-485a-ad7b-bb2ed9bf3f6c" path="/var/lib/kubelet/pods/070ef98c-992d-485a-ad7b-bb2ed9bf3f6c/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.653761 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bd9c381-3736-4813-87e0-eb9ebaa6b8ee" path="/var/lib/kubelet/pods/0bd9c381-3736-4813-87e0-eb9ebaa6b8ee/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.654314 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1776cf53-feb7-4695-a910-c74a47e30dec" path="/var/lib/kubelet/pods/1776cf53-feb7-4695-a910-c74a47e30dec/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.654796 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21b80b72-ea6e-4983-81b5-f0482f65b8a1" path="/var/lib/kubelet/pods/21b80b72-ea6e-4983-81b5-f0482f65b8a1/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.655748 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36839b2d-0307-41a0-bf7c-c37d9280d5be" path="/var/lib/kubelet/pods/36839b2d-0307-41a0-bf7c-c37d9280d5be/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.686390 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="387afd59-3ed2-49d0-92f8-60c250ea32fd" path="/var/lib/kubelet/pods/387afd59-3ed2-49d0-92f8-60c250ea32fd/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.687382 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a2256c1-a1b7-4c63-92be-8283893ede12" path="/var/lib/kubelet/pods/4a2256c1-a1b7-4c63-92be-8283893ede12/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.687934 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52a647a9-b6cd-434c-a388-25def81293c4" path="/var/lib/kubelet/pods/52a647a9-b6cd-434c-a388-25def81293c4/volumes"
Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.688534 4797 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found
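
The PreStop trace above is the kubelet echoing stop-ovsdb-server.sh as it runs: the hook polls for a semaphore file that the vswitchd shutdown path is expected to drop, then removes it and stops only the database server. The reported exit 137 means the hook process was killed (128 plus SIGKILL) before it returned, even though the ovs-ctl output ("Exiting ovsdb-server (5) [ OK ]") shows the server itself went down cleanly. A rough reconstruction from the trace; the sourced functions file and the semaphore's writer are not visible in the log, so this is an approximation, not the shipped script:

    # Approximate shape of the hook, inferred from the '+ ...' xtrace lines.
    SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
    # Poll until the vswitchd side signals it is safe to stop the DB server.
    while [ ! -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE" ]; do
        sleep 0.5
    done
    rm -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE"   # cleanup_ovsdb_server_semaphore
    /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
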
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts") pod "root-account-create-update-dg6d4" (UID: "9fc7d678-c699-4348-9654-5290b2d48bd3") : configmap "openstack-cell1-scripts" not found Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.688941 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db" path="/var/lib/kubelet/pods/5871ebc2-f0cd-4bf2-9aec-c8fcb1cc30db/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.689980 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65e365e6-5912-434a-a269-85dc5254dcba" path="/var/lib/kubelet/pods/65e365e6-5912-434a-a269-85dc5254dcba/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.691580 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9354793e-2888-4fba-a93a-841077bd7270/ovsdbserver-nb/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.691650 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.739121 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70761e1c-8caa-438d-b87d-c5f771e56ade" path="/var/lib/kubelet/pods/70761e1c-8caa-438d-b87d-c5f771e56ade/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.739623 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="724d4392-4880-4e98-b78e-676b080c32cc" path="/var/lib/kubelet/pods/724d4392-4880-4e98-b78e-676b080c32cc/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.740141 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e253c02-bdd1-46d2-b93e-e995108a867b" path="/var/lib/kubelet/pods/7e253c02-bdd1-46d2-b93e-e995108a867b/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.741523 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87e304fe-91cd-4ef9-841f-dd4fe87b6b35" path="/var/lib/kubelet/pods/87e304fe-91cd-4ef9-841f-dd4fe87b6b35/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.742027 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91d21bdd-4ab0-47ed-9a86-cd1473ce90a6" path="/var/lib/kubelet/pods/91d21bdd-4ab0-47ed-9a86-cd1473ce90a6/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.742507 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8575d68-d47c-4e79-a81f-0690139b672f" path="/var/lib/kubelet/pods/a8575d68-d47c-4e79-a81f-0690139b672f/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.754796 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2d84926f-3521-47ed-9581-a7beb6762e06/ovsdbserver-sb/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.754906 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.762624 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-txsr9_a305d4e5-d5e2-4bac-85ec-568c06b92b98/openstack-network-exporter/0.log" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.762683 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763437 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763499 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763519 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763550 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763599 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763625 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763729 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbdx\" (UniqueName: \"kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.763779 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config\") pod \"9354793e-2888-4fba-a93a-841077bd7270\" (UID: \"9354793e-2888-4fba-a93a-841077bd7270\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.765960 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee8bb47-3406-4f2c-8159-e6b9031ef090" path="/var/lib/kubelet/pods/aee8bb47-3406-4f2c-8159-e6b9031ef090/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.766500 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5b103c7-519f-4b21-a5a0-32e656db2dc9" path="/var/lib/kubelet/pods/d5b103c7-519f-4b21-a5a0-32e656db2dc9/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.766991 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="dae99fe4-dbd0-47e7-b9ae-53689293573b" path="/var/lib/kubelet/pods/dae99fe4-dbd0-47e7-b9ae-53689293573b/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.767986 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e64d6948-1c4e-4db6-b739-24b2aba46fd3" path="/var/lib/kubelet/pods/e64d6948-1c4e-4db6-b739-24b2aba46fd3/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: W0104 12:12:33.769156 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5da8a74d_b58c_4960_ac79_9b440f78fe73.slice/crio-b9f1058be4ae070b78966016a0654760ea91852400a344fb5e36433566bc1cef WatchSource:0}: Error finding container b9f1058be4ae070b78966016a0654760ea91852400a344fb5e36433566bc1cef: Status 404 returned error can't find the container with id b9f1058be4ae070b78966016a0654760ea91852400a344fb5e36433566bc1cef Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.769871 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e763a131-879e-4bfd-8138-2d3da9195289" path="/var/lib/kubelet/pods/e763a131-879e-4bfd-8138-2d3da9195289/volumes" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.770523 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779171 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-9k482"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779218 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-76htz"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779239 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerDied","Data":"dc4a21b1db6be2e07ca76bdb6c4c04513e5342187c0ece820874f1a86da403bc"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779265 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-9k482"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779276 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerDied","Data":"9a78a250903e07d598e88432420c861a2b78d03523ca1f130ae64e7cae2c30a0"} Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779290 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-srpmm"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.779303 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-srpmm"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.780374 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9354793e-2888-4fba-a93a-841077bd7270-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.780719 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.782392 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config" (OuterVolumeSpecName: "config") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.782592 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts" (OuterVolumeSpecName: "scripts") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.811857 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.843016 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.848497 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.882241 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx" (OuterVolumeSpecName: "kube-api-access-cfbdx") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "kube-api-access-cfbdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883378 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883415 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883454 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883494 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883523 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883538 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883562 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883580 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883631 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b57fv\" (UniqueName: \"kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883661 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: 
\"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883684 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883703 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883719 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883749 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883793 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883815 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883834 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts\") pod \"2d84926f-3521-47ed-9581-a7beb6762e06\" (UID: \"2d84926f-3521-47ed-9581-a7beb6762e06\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883862 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5glk\" (UniqueName: \"kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk\") pod \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\" (UID: \"a305d4e5-d5e2-4bac-85ec-568c06b92b98\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883897 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28x9z\" (UniqueName: \"kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: \"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.883925 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0\") pod \"8d097117-3af5-47a7-bcbd-ad887d0972df\" (UID: 
\"8d097117-3af5-47a7-bcbd-ad887d0972df\") " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.885447 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbdx\" (UniqueName: \"kubernetes.io/projected/9354793e-2888-4fba-a93a-841077bd7270-kube-api-access-cfbdx\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.885465 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.885474 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.885495 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.885505 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9354793e-2888-4fba-a93a-841077bd7270-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.893274 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.893426 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config" (OuterVolumeSpecName: "config") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.899879 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.900217 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-dg6d4"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.900231 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-dg6d4"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.900396 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.900682 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk" (OuterVolumeSpecName: "kube-api-access-v5glk") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "kube-api-access-v5glk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.900756 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.901125 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts" (OuterVolumeSpecName: "scripts") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.904548 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z" (OuterVolumeSpecName: "kube-api-access-28x9z") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "kube-api-access-28x9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.904882 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config" (OuterVolumeSpecName: "config") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.910064 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "ovs-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.924566 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="galera" containerID="cri-o://0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.924866 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:33 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: if [ -n "placement" ]; then Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="placement" Jan 04 12:12:33 crc kubenswrapper[4797]: else Jan 04 12:12:33 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:33 crc kubenswrapper[4797]: fi Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:33 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:33 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:33 crc kubenswrapper[4797]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:33 crc kubenswrapper[4797]: # support updates Jan 04 12:12:33 crc kubenswrapper[4797]: Jan 04 12:12:33 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.928909 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack/placement-0a7e-account-create-update-7g9mc" podUID="5da8a74d-b58c-4960-ac79-9b440f78fe73" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.942290 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.948105 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.948519 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerName="nova-scheduler-scheduler" containerID="cri-o://e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc" gracePeriod=30 Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988289 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988345 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988360 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988369 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d84926f-3521-47ed-9581-a7beb6762e06-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988378 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5glk\" (UniqueName: \"kubernetes.io/projected/a305d4e5-d5e2-4bac-85ec-568c06b92b98-kube-api-access-v5glk\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988387 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28x9z\" (UniqueName: \"kubernetes.io/projected/8d097117-3af5-47a7-bcbd-ad887d0972df-kube-api-access-28x9z\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988396 4797 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a305d4e5-d5e2-4bac-85ec-568c06b92b98-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988404 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.988412 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a305d4e5-d5e2-4bac-85ec-568c06b92b98-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[4797]: I0104 12:12:33.989537 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv" (OuterVolumeSpecName: "kube-api-access-b57fv") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "kube-api-access-b57fv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.990257 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:33 crc kubenswrapper[4797]: E0104 12:12:33.990305 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data podName:d5065e47-366d-4fc9-9acb-f7691489b27d nodeName:}" failed. No retries permitted until 2026-01-04 12:12:35.990288538 +0000 UTC m=+1454.847475247 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data") pod "rabbitmq-cell1-server-0" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.021523 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.090770 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b57fv\" (UniqueName: \"kubernetes.io/projected/2d84926f-3521-47ed-9581-a7beb6762e06-kube-api-access-b57fv\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.135766 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.138992 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="rabbitmq" containerID="cri-o://6c2bd4e26c7793a7e6748b52de489f7127e21a2457e862fa1b66701b1c8d40a1" gracePeriod=604800 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.149294 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.162661 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.162938 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" gracePeriod=30 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.189786 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wfvb9"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.193988 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.194020 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.194071 4797 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.194110 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts podName:9fc7d678-c699-4348-9654-5290b2d48bd3 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:35.194095664 +0000 UTC m=+1454.051282373 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts") pod "root-account-create-update-dg6d4" (UID: "9fc7d678-c699-4348-9654-5290b2d48bd3") : configmap "openstack-cell1-scripts" not found Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.194344 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wfvb9"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.199286 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.206707 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wjgc4"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.211939 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.214766 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.214950 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wjgc4"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.223394 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.223622 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerName="nova-cell0-conductor-conductor" containerID="cri-o://bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9" gracePeriod=30 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.260281 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.294232 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.295804 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.295824 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.295833 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.295844 4797 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.295852 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.295926 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.295940 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.295948 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.295958 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.296055 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:36.295990036 +0000 UTC m=+1455.153176745 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.343949 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9354793e-2888-4fba-a93a-841077bd7270" (UID: "9354793e-2888-4fba-a93a-841077bd7270"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.395660 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.399186 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.399219 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9354793e-2888-4fba-a93a-841077bd7270-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.410698 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "a305d4e5-d5e2-4bac-85ec-568c06b92b98" (UID: "a305d4e5-d5e2-4bac-85ec-568c06b92b98"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.421713 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.490708 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.501247 4797 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.501277 4797 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.501289 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a305d4e5-d5e2-4bac-85ec-568c06b92b98-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.519516 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config" (OuterVolumeSpecName: "config") pod "8d097117-3af5-47a7-bcbd-ad887d0972df" (UID: "8d097117-3af5-47a7-bcbd-ad887d0972df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.539204 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "2d84926f-3521-47ed-9581-a7beb6762e06" (UID: "2d84926f-3521-47ed-9581-a7beb6762e06"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.539255 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2d84926f-3521-47ed-9581-a7beb6762e06/ovsdbserver-sb/0.log" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.539355 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.539381 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2d84926f-3521-47ed-9581-a7beb6762e06","Type":"ContainerDied","Data":"bf9532ac64b46e063d3f67cc8b7a7c6b2ae05b027b7998fb4afed29a2756f447"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.539429 4797 scope.go:117] "RemoveContainer" containerID="876e9084cf30c1be04d840130d0d0fc76c012572a641f8f762449c931b2c2a7b" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.561219 4797 generic.go:334] "Generic (PLEG): container finished" podID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerID="1881930c5a9519ddc49465313777fe56cca8210c13be4a63ce3f9a3424e5e91c" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.561257 4797 generic.go:334] "Generic (PLEG): container finished" podID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerID="1105f111e259358a7b341195cb86cefe7de4dadda4bb97857fb143d53d1d387e" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.561323 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerDied","Data":"1881930c5a9519ddc49465313777fe56cca8210c13be4a63ce3f9a3424e5e91c"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.561354 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerDied","Data":"1105f111e259358a7b341195cb86cefe7de4dadda4bb97857fb143d53d1d387e"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.596295 4797 generic.go:334] "Generic (PLEG): container finished" podID="c7bef264-130e-4b89-ae25-bff622d12a16" containerID="4ffd1060deed2e4837e76d50f06b6b55d7a3082c39494946544e96c70503bbad" exitCode=143 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.596402 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerDied","Data":"4ffd1060deed2e4837e76d50f06b6b55d7a3082c39494946544e96c70503bbad"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.599876 4797 generic.go:334] "Generic (PLEG): container finished" podID="c044e46d-b79e-4f22-be2d-98408745d63a" containerID="b817e46fd4e00fefeca15f786375e9089980050aceb8fad138a6fef75a80c940" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.599960 4797 generic.go:334] "Generic (PLEG): container finished" podID="c044e46d-b79e-4f22-be2d-98408745d63a" containerID="c24b57917dd5aa7e5ce4a5adee1907c75b34a49d88fc6eb6757c983dec5cfd3a" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.600025 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerDied","Data":"b817e46fd4e00fefeca15f786375e9089980050aceb8fad138a6fef75a80c940"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.600054 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerDied","Data":"c24b57917dd5aa7e5ce4a5adee1907c75b34a49d88fc6eb6757c983dec5cfd3a"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.600070 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5746959b69-brph4" 
event={"ID":"c044e46d-b79e-4f22-be2d-98408745d63a","Type":"ContainerDied","Data":"828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.600081 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="828cb4abd52e4953f212db3a52d312d931c0b5dc174503986c40c686a5e023c0" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.602739 4797 generic.go:334] "Generic (PLEG): container finished" podID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.602802 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerDied","Data":"07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.604768 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d84926f-3521-47ed-9581-a7beb6762e06-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.604801 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d097117-3af5-47a7-bcbd-ad887d0972df-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.608840 4797 generic.go:334] "Generic (PLEG): container finished" podID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerID="3e3a15f869a29fb29c3c3b83c8c750fb50a7d3d3675123fde9492287428afb82" exitCode=143 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.608897 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerDied","Data":"3e3a15f869a29fb29c3c3b83c8c750fb50a7d3d3675123fde9492287428afb82"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.619279 4797 generic.go:334] "Generic (PLEG): container finished" podID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerID="fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154" exitCode=143 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.619384 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerDied","Data":"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.621541 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-7g9mc" event={"ID":"5da8a74d-b58c-4960-ac79-9b440f78fe73","Type":"ContainerStarted","Data":"b9f1058be4ae070b78966016a0654760ea91852400a344fb5e36433566bc1cef"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.692934 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="2ddf43a128cda79487c737e8b808ccee520d60c4184785332b0d190c687e82c4" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.692970 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="983ab368fd1a7cc5f480857b432d0a3e9e25e3576b0af3b3338d273ed9c26b17" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.692982 4797 generic.go:334] "Generic (PLEG): 
container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="4d0acf805d72a971037b409b90ec01a6ebbc5d590e92c591c625d2fa707db99c" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.692992 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="6434cfdff0caacbdc87686c3fccdc177be05f4195a31c1b2f6bbbfe1abc8fc18" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693021 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="dac8160cf70dc4b847fed968eb4544e73150e080abe8ddac5ed8f69951612687" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693031 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="d94828be090a9c92168c4bc7043848fc90bb1e6e64c5c3b90561fa797b2111c6" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693041 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="2706f017a019b29eb56e1869f628c26c7a0403d0ac794f7452caa2d980a884e3" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693051 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="9d917f5809cfdb7d3e5560e391457ae4c770a5118b5655d11d4280c6634d5e65" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693059 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="2b82ee45235bd2492b8f0fbdeb7b02c90e80f762e2a643a80221e2da952c67c8" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693067 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="4bd8a0778e29226183846c723b3b1dbfb7b51f65f48a65e13a57c6521dc2d967" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693075 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="99d34e3c79b062431c9d84a0e920a2cea64a5e8ddf3dd8c6b4b199964f36fd85" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693083 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="95970be1d420d961aa5faba4feae52bf2847295f0bff54ad0ab5ecfc128f1139" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693090 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="f92a49d431c52702876723a17e42bd64b37bc0d9cd0421a5b6941ec41e2dc6b7" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693098 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="3878ed80aaf358279c48ab4d9c9c529e7175864a9d22b44343cee0f365760367" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693163 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"2ddf43a128cda79487c737e8b808ccee520d60c4184785332b0d190c687e82c4"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693194 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"983ab368fd1a7cc5f480857b432d0a3e9e25e3576b0af3b3338d273ed9c26b17"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693211 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"4d0acf805d72a971037b409b90ec01a6ebbc5d590e92c591c625d2fa707db99c"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693221 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"6434cfdff0caacbdc87686c3fccdc177be05f4195a31c1b2f6bbbfe1abc8fc18"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693232 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"dac8160cf70dc4b847fed968eb4544e73150e080abe8ddac5ed8f69951612687"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693243 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"d94828be090a9c92168c4bc7043848fc90bb1e6e64c5c3b90561fa797b2111c6"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693252 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"2706f017a019b29eb56e1869f628c26c7a0403d0ac794f7452caa2d980a884e3"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693268 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"9d917f5809cfdb7d3e5560e391457ae4c770a5118b5655d11d4280c6634d5e65"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693281 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"2b82ee45235bd2492b8f0fbdeb7b02c90e80f762e2a643a80221e2da952c67c8"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693302 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"4bd8a0778e29226183846c723b3b1dbfb7b51f65f48a65e13a57c6521dc2d967"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693314 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"99d34e3c79b062431c9d84a0e920a2cea64a5e8ddf3dd8c6b4b199964f36fd85"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693326 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"95970be1d420d961aa5faba4feae52bf2847295f0bff54ad0ab5ecfc128f1139"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693336 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"f92a49d431c52702876723a17e42bd64b37bc0d9cd0421a5b6941ec41e2dc6b7"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.693348 4797 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"3878ed80aaf358279c48ab4d9c9c529e7175864a9d22b44343cee0f365760367"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.697836 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.709408 4797 generic.go:334] "Generic (PLEG): container finished" podID="ea5de376-7b66-40d0-8565-c8a34961540b" containerID="a95bc6ae38195a799c2eec6d7e3b1fc38748ec8ad711d67378d10d8eb96a34a6" exitCode=137 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.709522 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12058c4a0906fc2a08ff65ffb16bd6d5e11721e2d8738c74b84b3e1db759452a" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.709925 4797 scope.go:117] "RemoveContainer" containerID="1b47902615c0883c42301d21eb8e7a3ecf78720d4c51636bcc79479f16c32b75" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.742196 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.761319 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.767638 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.768173 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" event={"ID":"8d097117-3af5-47a7-bcbd-ad887d0972df","Type":"ContainerDied","Data":"459dc42a5bf1f9a83e4b075b0c3e22e1d7173609fd4a2b9868696a3d21529525"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.768273 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867cd545c7-6sldr" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.788069 4797 generic.go:334] "Generic (PLEG): container finished" podID="a1480006-db62-4dfd-af3b-c394600f632c" containerID="fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713" exitCode=143 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.788257 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerDied","Data":"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.790273 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-txsr9_a305d4e5-d5e2-4bac-85ec-568c06b92b98/openstack-network-exporter/0.log" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.790376 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-txsr9" event={"ID":"a305d4e5-d5e2-4bac-85ec-568c06b92b98","Type":"ContainerDied","Data":"1641f6584669b10c7e6092eb04077767c3bc7d6d0dd4e03089481e7c49b1e589"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.790493 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-txsr9" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.797095 4797 scope.go:117] "RemoveContainer" containerID="1e8c12d06a30d46110e5c3864a386ee61f29f7a73cce977df15092fc9de6dac5" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.798405 4797 generic.go:334] "Generic (PLEG): container finished" podID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerID="d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef" exitCode=143 Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.798954 4797 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/root-account-create-update-dg6d4" secret="" err="secret \"galera-openstack-cell1-dockercfg-rdc6f\" not found" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.799024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerDied","Data":"d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef"} Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.799209 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.800664 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:34 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: if [ -n "" ]; then Jan 04 12:12:34 crc kubenswrapper[4797]: GRANT_DATABASE="" Jan 04 12:12:34 crc kubenswrapper[4797]: else Jan 04 12:12:34 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:34 crc kubenswrapper[4797]: fi Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:34 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:34 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:34 crc kubenswrapper[4797]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:34 crc kubenswrapper[4797]: # support updates Jan 04 12:12:34 crc kubenswrapper[4797]: Jan 04 12:12:34 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:34 crc kubenswrapper[4797]: E0104 12:12:34.801934 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-dg6d4" podUID="9fc7d678-c699-4348-9654-5290b2d48bd3" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.808659 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.808903 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.812712 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.812821 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.812850 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgc5d\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.812953 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.813048 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.813147 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd\") pod \"c044e46d-b79e-4f22-be2d-98408745d63a\" (UID: \"c044e46d-b79e-4f22-be2d-98408745d63a\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.814372 4797 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.814453 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.814851 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.830298 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d" (OuterVolumeSpecName: "kube-api-access-fgc5d") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "kube-api-access-fgc5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.833254 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.833383 4797 scope.go:117] "RemoveContainer" containerID="942a2a207d33a194350ddb61a268b463381597f7aa1bb2b9c422d7f35718c86b" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.880246 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.913822 4797 scope.go:117] "RemoveContainer" containerID="db595fec7696fae4c7b3b7368a9d2399b874117452a45b57928c938fea8c4220" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.918319 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret\") pod \"ea5de376-7b66-40d0-8565-c8a34961540b\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.918350 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb8rw\" (UniqueName: \"kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw\") pod \"ea5de376-7b66-40d0-8565-c8a34961540b\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.918890 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle\") pod \"ea5de376-7b66-40d0-8565-c8a34961540b\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.918932 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config\") pod \"ea5de376-7b66-40d0-8565-c8a34961540b\" (UID: \"ea5de376-7b66-40d0-8565-c8a34961540b\") " Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.928623 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-867cd545c7-6sldr"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.932223 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.941256 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.941284 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgc5d\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-kube-api-access-fgc5d\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.941294 4797 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c044e46d-b79e-4f22-be2d-98408745d63a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.941303 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c044e46d-b79e-4f22-be2d-98408745d63a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.943229 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.945255 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw" (OuterVolumeSpecName: "kube-api-access-gb8rw") pod "ea5de376-7b66-40d0-8565-c8a34961540b" (UID: "ea5de376-7b66-40d0-8565-c8a34961540b"). InnerVolumeSpecName "kube-api-access-gb8rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.957547 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.969214 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-txsr9"] Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.983356 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ea5de376-7b66-40d0-8565-c8a34961540b" (UID: "ea5de376-7b66-40d0-8565-c8a34961540b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.990313 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.993168 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea5de376-7b66-40d0-8565-c8a34961540b" (UID: "ea5de376-7b66-40d0-8565-c8a34961540b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[4797]: I0104 12:12:34.984966 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-txsr9"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.036269 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.043173 4797 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.043192 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.043202 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb8rw\" (UniqueName: \"kubernetes.io/projected/ea5de376-7b66-40d0-8565-c8a34961540b-kube-api-access-gb8rw\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.043213 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.043222 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.059629 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.107373 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ea5de376-7b66-40d0-8565-c8a34961540b" (UID: "ea5de376-7b66-40d0-8565-c8a34961540b"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.139606 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data" (OuterVolumeSpecName: "config-data") pod "c044e46d-b79e-4f22-be2d-98408745d63a" (UID: "c044e46d-b79e-4f22-be2d-98408745d63a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.150124 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data\") pod \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.150196 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs\") pod \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.150286 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzcn2\" (UniqueName: \"kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2\") pod \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.150317 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle\") pod \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.150362 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs\") pod \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\" (UID: \"b6ef2789-7ab1-46b1-852e-5bbb106b4044\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.151494 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c044e46d-b79e-4f22-be2d-98408745d63a-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.151512 4797 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ea5de376-7b66-40d0-8565-c8a34961540b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.185153 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2" (OuterVolumeSpecName: "kube-api-access-kzcn2") pod "b6ef2789-7ab1-46b1-852e-5bbb106b4044" (UID: "b6ef2789-7ab1-46b1-852e-5bbb106b4044"). InnerVolumeSpecName "kube-api-access-kzcn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.190149 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6ef2789-7ab1-46b1-852e-5bbb106b4044" (UID: "b6ef2789-7ab1-46b1-852e-5bbb106b4044"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.190855 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.238068 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data" (OuterVolumeSpecName: "config-data") pod "b6ef2789-7ab1-46b1-852e-5bbb106b4044" (UID: "b6ef2789-7ab1-46b1-852e-5bbb106b4044"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.255692 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts\") pod \"5da8a74d-b58c-4960-ac79-9b440f78fe73\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.255764 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9lr6\" (UniqueName: \"kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6\") pod \"5da8a74d-b58c-4960-ac79-9b440f78fe73\" (UID: \"5da8a74d-b58c-4960-ac79-9b440f78fe73\") " Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.256206 4797 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.256307 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts podName:9fc7d678-c699-4348-9654-5290b2d48bd3 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:37.256272927 +0000 UTC m=+1456.113459636 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts") pod "root-account-create-update-dg6d4" (UID: "9fc7d678-c699-4348-9654-5290b2d48bd3") : configmap "openstack-cell1-scripts" not found Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.256337 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.256352 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzcn2\" (UniqueName: \"kubernetes.io/projected/b6ef2789-7ab1-46b1-852e-5bbb106b4044-kube-api-access-kzcn2\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.256363 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.256847 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5da8a74d-b58c-4960-ac79-9b440f78fe73" (UID: "5da8a74d-b58c-4960-ac79-9b440f78fe73"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.263641 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6" (OuterVolumeSpecName: "kube-api-access-d9lr6") pod "5da8a74d-b58c-4960-ac79-9b440f78fe73" (UID: "5da8a74d-b58c-4960-ac79-9b440f78fe73"). InnerVolumeSpecName "kube-api-access-d9lr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.265628 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "b6ef2789-7ab1-46b1-852e-5bbb106b4044" (UID: "b6ef2789-7ab1-46b1-852e-5bbb106b4044"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.299352 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "b6ef2789-7ab1-46b1-852e-5bbb106b4044" (UID: "b6ef2789-7ab1-46b1-852e-5bbb106b4044"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.354354 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.361210 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5da8a74d-b58c-4960-ac79-9b440f78fe73-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.361237 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9lr6\" (UniqueName: \"kubernetes.io/projected/5da8a74d-b58c-4960-ac79-9b440f78fe73-kube-api-access-d9lr6\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.361247 4797 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.361257 4797 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ef2789-7ab1-46b1-852e-5bbb106b4044-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.361383 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.361427 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data podName:1414255a-a94a-4508-aa55-4ad9837afbea nodeName:}" failed. No retries permitted until 2026-01-04 12:12:39.36141542 +0000 UTC m=+1458.218602129 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data") pod "rabbitmq-server-0" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea") : configmap "rabbitmq-config-data" not found Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.436751 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.445784 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.447742 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.461909 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ks9cx\" (UniqueName: \"kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx\") pod \"a1480006-db62-4dfd-af3b-c394600f632c\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.461938 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.461957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.461974 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462012 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data\") pod \"a1480006-db62-4dfd-af3b-c394600f632c\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462036 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462070 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle\") pod \"a1480006-db62-4dfd-af3b-c394600f632c\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462095 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4bht\" (UniqueName: 
\"kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462122 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs\") pod \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462160 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle\") pod \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462175 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts\") pod \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462199 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data\") pod \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462213 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data\") pod \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\" (UID: \"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462230 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom\") pod \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462263 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom\") pod \"a1480006-db62-4dfd-af3b-c394600f632c\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462280 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs\") pod \"a1480006-db62-4dfd-af3b-c394600f632c\" (UID: \"a1480006-db62-4dfd-af3b-c394600f632c\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462298 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-596vd\" (UniqueName: \"kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd\") pod \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\" (UID: \"3571eba4-ffe7-46c9-a3ba-895a81b311a9\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.462323 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm46n\" (UniqueName: 
\"kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n\") pod \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\" (UID: \"b6693e0d-f1c0-49c2-8901-4eb6d7d56681\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.467119 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n" (OuterVolumeSpecName: "kube-api-access-jm46n") pod "b6693e0d-f1c0-49c2-8901-4eb6d7d56681" (UID: "b6693e0d-f1c0-49c2-8901-4eb6d7d56681"). InnerVolumeSpecName "kube-api-access-jm46n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.472445 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx" (OuterVolumeSpecName: "kube-api-access-ks9cx") pod "a1480006-db62-4dfd-af3b-c394600f632c" (UID: "a1480006-db62-4dfd-af3b-c394600f632c"). InnerVolumeSpecName "kube-api-access-ks9cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.474245 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs" (OuterVolumeSpecName: "logs") pod "3571eba4-ffe7-46c9-a3ba-895a81b311a9" (UID: "3571eba4-ffe7-46c9-a3ba-895a81b311a9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.480370 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6693e0d-f1c0-49c2-8901-4eb6d7d56681" (UID: "b6693e0d-f1c0-49c2-8901-4eb6d7d56681"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.480868 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs" (OuterVolumeSpecName: "logs") pod "a1480006-db62-4dfd-af3b-c394600f632c" (UID: "a1480006-db62-4dfd-af3b-c394600f632c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.482179 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.482242 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.482682 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3571eba4-ffe7-46c9-a3ba-895a81b311a9" (UID: "3571eba4-ffe7-46c9-a3ba-895a81b311a9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.484398 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a1480006-db62-4dfd-af3b-c394600f632c" (UID: "a1480006-db62-4dfd-af3b-c394600f632c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.484980 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts" (OuterVolumeSpecName: "scripts") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.490199 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd" (OuterVolumeSpecName: "kube-api-access-596vd") pod "3571eba4-ffe7-46c9-a3ba-895a81b311a9" (UID: "3571eba4-ffe7-46c9-a3ba-895a81b311a9"). InnerVolumeSpecName "kube-api-access-596vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.511739 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="255b543b-ea95-457a-a7b5-63190019b8e8" path="/var/lib/kubelet/pods/255b543b-ea95-457a-a7b5-63190019b8e8/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.512124 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht" (OuterVolumeSpecName: "kube-api-access-z4bht") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "kube-api-access-z4bht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.512730 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" path="/var/lib/kubelet/pods/2d84926f-3521-47ed-9581-a7beb6762e06/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.513320 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a1a1ecb-6333-434f-a843-4541ddcc9f48" path="/var/lib/kubelet/pods/3a1a1ecb-6333-434f-a843-4541ddcc9f48/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.514339 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88052d87-10b0-4814-9cc7-62e7040b415a" path="/var/lib/kubelet/pods/88052d87-10b0-4814-9cc7-62e7040b415a/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.514844 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" path="/var/lib/kubelet/pods/8d097117-3af5-47a7-bcbd-ad887d0972df/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.515456 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9354793e-2888-4fba-a93a-841077bd7270" path="/var/lib/kubelet/pods/9354793e-2888-4fba-a93a-841077bd7270/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.516420 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" path="/var/lib/kubelet/pods/a305d4e5-d5e2-4bac-85ec-568c06b92b98/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.517024 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d53eda-fceb-4efa-a0fd-86912378be1c" path="/var/lib/kubelet/pods/a9d53eda-fceb-4efa-a0fd-86912378be1c/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.517522 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5de376-7b66-40d0-8565-c8a34961540b" path="/var/lib/kubelet/pods/ea5de376-7b66-40d0-8565-c8a34961540b/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.518459 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe6fbd9-7112-4c2c-a088-2da12bce0ddb" path="/var/lib/kubelet/pods/ebe6fbd9-7112-4c2c-a088-2da12bce0ddb/volumes" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.553153 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3571eba4-ffe7-46c9-a3ba-895a81b311a9" (UID: "3571eba4-ffe7-46c9-a3ba-895a81b311a9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.564163 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ks9cx\" (UniqueName: \"kubernetes.io/projected/a1480006-db62-4dfd-af3b-c394600f632c-kube-api-access-ks9cx\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566209 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566294 4797 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566369 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566442 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4bht\" (UniqueName: \"kubernetes.io/projected/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-kube-api-access-z4bht\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566504 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3571eba4-ffe7-46c9-a3ba-895a81b311a9-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566562 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566615 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566686 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566755 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566823 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1480006-db62-4dfd-af3b-c394600f632c-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566919 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-596vd\" (UniqueName: \"kubernetes.io/projected/3571eba4-ffe7-46c9-a3ba-895a81b311a9-kube-api-access-596vd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.566972 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm46n\" (UniqueName: \"kubernetes.io/projected/b6693e0d-f1c0-49c2-8901-4eb6d7d56681-kube-api-access-jm46n\") on node \"crc\" DevicePath 
\"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.568094 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.588726 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1480006-db62-4dfd-af3b-c394600f632c" (UID: "a1480006-db62-4dfd-af3b-c394600f632c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.604501 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data" (OuterVolumeSpecName: "config-data") pod "a1480006-db62-4dfd-af3b-c394600f632c" (UID: "a1480006-db62-4dfd-af3b-c394600f632c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.609554 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data" (OuterVolumeSpecName: "config-data") pod "3571eba4-ffe7-46c9-a3ba-895a81b311a9" (UID: "3571eba4-ffe7-46c9-a3ba-895a81b311a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.616253 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.636191 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.638157 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.641561 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.641604 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="ovn-northd" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.660062 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data" (OuterVolumeSpecName: "config-data") pod "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" (UID: "d941aae2-1ed7-41ee-a2a3-38a23a9de6ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.668682 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phcfm\" (UniqueName: \"kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.668763 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.668831 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.668868 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.668921 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669019 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669105 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669132 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default\") pod \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\" (UID: \"72e6c6e9-97f2-4420-a6b9-92418e78dd60\") " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669478 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669495 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3571eba4-ffe7-46c9-a3ba-895a81b311a9-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669504 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669513 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.669524 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1480006-db62-4dfd-af3b-c394600f632c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.670128 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.670452 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.670562 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.670950 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.693158 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm" (OuterVolumeSpecName: "kube-api-access-phcfm") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "kube-api-access-phcfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.700468 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.706258 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.725141 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "72e6c6e9-97f2-4420-a6b9-92418e78dd60" (UID: "72e6c6e9-97f2-4420-a6b9-92418e78dd60"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771493 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phcfm\" (UniqueName: \"kubernetes.io/projected/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kube-api-access-phcfm\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771529 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771550 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771560 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771569 4797 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771578 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e6c6e9-97f2-4420-a6b9-92418e78dd60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771586 4797 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.771594 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72e6c6e9-97f2-4420-a6b9-92418e78dd60-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.790276 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.810526 4797 generic.go:334] "Generic (PLEG): container finished" podID="a1480006-db62-4dfd-af3b-c394600f632c" 
containerID="6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20" exitCode=0 Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.810581 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerDied","Data":"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.810679 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" event={"ID":"a1480006-db62-4dfd-af3b-c394600f632c","Type":"ContainerDied","Data":"65582f47419050670384c2fc8a984893b34e2018329211e0c162d6dd88c5704d"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.810717 4797 scope.go:117] "RemoveContainer" containerID="6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.810993 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.827233 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d941aae2-1ed7-41ee-a2a3-38a23a9de6ff","Type":"ContainerDied","Data":"418b73115c456075c6de1d26ac6b118315cd9948a61c16d32530742173952ea4"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.827319 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840443 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840778 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840790 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840798 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener-log" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840805 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener-log" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840822 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="mysql-bootstrap" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840828 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="mysql-bootstrap" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840841 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-server" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840848 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-server" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840865 4797 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="init" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840873 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="init" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840885 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="ovsdbserver-sb" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840893 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="ovsdbserver-sb" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840901 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840907 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840918 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840925 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840935 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="galera" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840940 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="galera" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840951 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker-log" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840957 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker-log" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840965 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-httpd" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840970 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-httpd" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840980 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.840990 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.840998 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="cinder-scheduler" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841017 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="cinder-scheduler" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.841028 4797 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841034 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.841045 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="dnsmasq-dns" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841050 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="dnsmasq-dns" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.841058 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="probe" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841064 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="probe" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.841078 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="ovsdbserver-nb" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841084 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="ovsdbserver-nb" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.841092 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841119 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841310 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerName="galera" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841324 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841334 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="probe" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841344 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="ovsdbserver-nb" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841350 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1480006-db62-4dfd-af3b-c394600f632c" containerName="barbican-keystone-listener-log" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841359 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305d4e5-d5e2-4bac-85ec-568c06b92b98" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841367 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-server" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841373 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1480006-db62-4dfd-af3b-c394600f632c" 
containerName="barbican-keystone-listener" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841380 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="9354793e-2888-4fba-a93a-841077bd7270" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841388 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker-log" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841398 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d097117-3af5-47a7-bcbd-ad887d0972df" containerName="dnsmasq-dns" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841408 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="openstack-network-exporter" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841415 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerName="barbican-worker" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841425 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d84926f-3521-47ed-9581-a7beb6762e06" containerName="ovsdbserver-sb" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841433 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" containerName="proxy-httpd" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841441 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" containerName="cinder-scheduler" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.841946 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.843797 4797 generic.go:334] "Generic (PLEG): container finished" podID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" containerID="4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f" exitCode=0 Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.843870 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6ef2789-7ab1-46b1-852e-5bbb106b4044","Type":"ContainerDied","Data":"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.843909 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6ef2789-7ab1-46b1-852e-5bbb106b4044","Type":"ContainerDied","Data":"2127fbb355294cb08d15d7f40fa077e0ede282b05a12d346e356e92238f327fb"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.843947 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.844125 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.848434 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0a7e-account-create-update-7g9mc" event={"ID":"5da8a74d-b58c-4960-ac79-9b440f78fe73","Type":"ContainerDied","Data":"b9f1058be4ae070b78966016a0654760ea91852400a344fb5e36433566bc1cef"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.848509 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0a7e-account-create-update-7g9mc" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.850546 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e050-account-create-update-kbbx4" event={"ID":"b6693e0d-f1c0-49c2-8901-4eb6d7d56681","Type":"ContainerDied","Data":"3c5058a20316c589a657833302ddd78f24b977d4ca6aa28c5b937a6b3bb069ac"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.850614 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e050-account-create-update-kbbx4" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.851248 4797 scope.go:117] "RemoveContainer" containerID="fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.854429 4797 generic.go:334] "Generic (PLEG): container finished" podID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" containerID="ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a" exitCode=0 Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.854465 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerDied","Data":"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.854480 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6bfdff7977-xqk4d" event={"ID":"3571eba4-ffe7-46c9-a3ba-895a81b311a9","Type":"ContainerDied","Data":"f7ac70e6b68a51082afd56d60b02927d741e4680ca97aefcd3a70017764a23ac"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.854555 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6bfdff7977-xqk4d" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.863599 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.871524 4797 generic.go:334] "Generic (PLEG): container finished" podID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" containerID="0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba" exitCode=0 Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.871681 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.871859 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerDied","Data":"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.871910 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"72e6c6e9-97f2-4420-a6b9-92418e78dd60","Type":"ContainerDied","Data":"9be205198f51cb0a4cd3b06beccbbed5cb91576c2cfce549967de9b7f80d5632"} Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.873545 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.874050 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz22f\" (UniqueName: \"kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.874281 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.879324 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5746959b69-brph4" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.879899 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.895580 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.898059 4797 scope.go:117] "RemoveContainer" containerID="6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.900246 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.904644 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20\": container with ID starting with 6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20 not found: ID does not exist" containerID="6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.904683 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20"} err="failed to get container status \"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20\": rpc error: code = NotFound desc = could not find container \"6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20\": container with ID starting with 6028ec4cc32b40cbefcff387192fdae7ac119cccde179bd6f2fe58770cd8ac20 not found: ID does not exist" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.904711 4797 scope.go:117] "RemoveContainer" containerID="fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713" Jan 04 12:12:35 crc kubenswrapper[4797]: E0104 12:12:35.908527 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713\": container with ID starting with fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713 not found: ID does not exist" containerID="fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.908707 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713"} err="failed to get container status \"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713\": rpc error: code = NotFound desc = could not find container \"fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713\": container with ID starting with fe56f85751e052f3f129573ac47497abbf761b80f680c5ad2132754fa3b6f713 not found: ID does not exist" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.909477 4797 scope.go:117] "RemoveContainer" containerID="1881930c5a9519ddc49465313777fe56cca8210c13be4a63ce3f9a3424e5e91c" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.912798 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.929547 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-6bfdff7977-xqk4d"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.929588 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:35 crc kubenswrapper[4797]: 
I0104 12:12:35.932398 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.948108 4797 scope.go:117] "RemoveContainer" containerID="1105f111e259358a7b341195cb86cefe7de4dadda4bb97857fb143d53d1d387e" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.975942 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.976792 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.976863 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz22f\" (UniqueName: \"kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.977831 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:35 crc kubenswrapper[4797]: I0104 12:12:35.989316 4797 scope.go:117] "RemoveContainer" containerID="4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.012305 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-6ffb4dbc44-wqxfk"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.043045 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz22f\" (UniqueName: \"kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f\") pod \"root-account-create-update-pw258\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " pod="openstack/root-account-create-update-pw258" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.068666 4797 scope.go:117] "RemoveContainer" containerID="4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.068837 4797 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 04 12:12:36 crc kubenswrapper[4797]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-04T12:12:33Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 04 12:12:36 crc kubenswrapper[4797]: /etc/init.d/functions: line 589: 372 Alarm clock "$@" Jan 04 12:12:36 crc kubenswrapper[4797]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-8lx8k" message=< Jan 04 12:12:36 crc kubenswrapper[4797]: Exiting ovn-controller (1) [FAILED] Jan 04 12:12:36 crc kubenswrapper[4797]: Killing ovn-controller (1) [ OK ] Jan 04 12:12:36 crc kubenswrapper[4797]: 2026-01-04T12:12:33Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 04 12:12:36 crc 
kubenswrapper[4797]: /etc/init.d/functions: line 589: 372 Alarm clock "$@" Jan 04 12:12:36 crc kubenswrapper[4797]: > Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.068880 4797 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 04 12:12:36 crc kubenswrapper[4797]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-04T12:12:33Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 04 12:12:36 crc kubenswrapper[4797]: /etc/init.d/functions: line 589: 372 Alarm clock "$@" Jan 04 12:12:36 crc kubenswrapper[4797]: > pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" containerID="cri-o://69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.068919 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" containerID="cri-o://69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" gracePeriod=27 Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.070069 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f\": container with ID starting with 4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f not found: ID does not exist" containerID="4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.070102 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f"} err="failed to get container status \"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f\": rpc error: code = NotFound desc = could not find container \"4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f\": container with ID starting with 4d7cb696f10fdeee1bef6a5929787fec0c8a3a6a29b7bf80d48c53dfc8b0012f not found: ID does not exist" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.070122 4797 scope.go:117] "RemoveContainer" containerID="ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.070733 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.076471 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-0a7e-account-create-update-7g9mc"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.079079 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.079130 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data podName:d5065e47-366d-4fc9-9acb-f7691489b27d nodeName:}" failed. No retries permitted until 2026-01-04 12:12:40.079115615 +0000 UTC m=+1458.936302324 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data") pod "rabbitmq-cell1-server-0" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.136269 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.155563 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-5746959b69-brph4"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.173212 4797 scope.go:117] "RemoveContainer" containerID="fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.173371 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.176273 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-pw258" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.193054 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e050-account-create-update-kbbx4"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.205049 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 is running failed: container process not found" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.210108 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 is running failed: container process not found" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.210207 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.214100 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 is running failed: container process not found" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.214142 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 is running failed: container process not found" 
probeType="Readiness" pod="openstack/ovn-controller-8lx8k" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.214190 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.216814 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.231204 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.231269 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.238749 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.245642 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.248230 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.248304 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.269676 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-httpd" probeResult="failure" output="Get 
\"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:41558->10.217.0.178:9292: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.269756 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:41544->10.217.0.178:9292: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.275807 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.278146 4797 scope.go:117] "RemoveContainer" containerID="ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.286667 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a\": container with ID starting with ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a not found: ID does not exist" containerID="ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.288574 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a"} err="failed to get container status \"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a\": rpc error: code = NotFound desc = could not find container \"ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a\": container with ID starting with ae79b7f7c95cc20667df78d5712999d2f0b5d985fda4ecf0233f33aa2eca2b2a not found: ID does not exist" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.288606 4797 scope.go:117] "RemoveContainer" containerID="fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.302434 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154\": container with ID starting with fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154 not found: ID does not exist" containerID="fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.302477 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154"} err="failed to get container status \"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154\": rpc error: code = NotFound desc = could not find container \"fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154\": container with ID starting with fca8de44c14cce942a3bc4114815777aabc6bccb564dfc649f078bc24ded7154 not found: ID does not exist" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.302503 4797 scope.go:117] "RemoveContainer" containerID="0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.324258 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.324516 4797 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-central-agent" containerID="cri-o://5e95861f05a86916d5bbae50b4b2c7dbc814b737dab7253f6895bdb7c85967b0" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.324841 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="proxy-httpd" containerID="cri-o://1c0fd3972d9964ce21b2d52308680c08c54401c7fccda3b5fdc0b6f2f4fc3b91" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.324879 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="sg-core" containerID="cri-o://c8a43ccc510e54f1604c2a6c7afb923307b8eda1f1c77a645703c76df3b2be10" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.324914 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-notification-agent" containerID="cri-o://4516e73ced3267162e550e03f293cb70e85913d200f7bae5f299b8b55ebea07c" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.342242 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.342525 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" containerName="kube-state-metrics" containerID="cri-o://2e3eef8a595abeb6c08dda235b265327b59e733829fd72a94fa5205605eee82c" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.369618 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.369919 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" containerName="memcached" containerID="cri-o://2a9eccf9a453b475692f8ed1731c1a41b538844deba6d050f2d590b9042ae5f2" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.397203 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.397232 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.397241 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.397251 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.397297 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:40.397281501 +0000 UTC m=+1459.254468210 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.400757 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-fe06-account-create-update-6qbzh"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.413286 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-fe06-account-create-update-6qbzh"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.422440 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-fe06-account-create-update-qrx7h"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.423859 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.425446 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.449860 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fe06-account-create-update-qrx7h"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.488663 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7tpfs"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.498277 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-wsnfd"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.498607 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-wsnfd"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.513499 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.513709 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-7d545b89c4-j2jcg" podUID="5d40c323-3444-4e84-8eb3-799d343c384d" containerName="keystone-api" containerID="cri-o://28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf" gracePeriod=30 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.514935 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpdq2\" (UniqueName: \"kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.515033 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.519194 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7tpfs"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.521276 4797 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:47498->10.217.0.200:8775: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.521404 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:47508->10.217.0.200:8775: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.534306 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.547922 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.179:9292/healthcheck\": read tcp 10.217.0.2:57524->10.217.0.179:9292: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.548049 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.179:9292/healthcheck\": read tcp 10.217.0.2:57516->10.217.0.179:9292: read: connection reset by peer" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.555901 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.579844 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-dmxvk"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.595074 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-dmxvk"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.611034 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-fe06-account-create-update-qrx7h"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.616913 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpdq2\" (UniqueName: \"kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.617042 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.617179 4797 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.617224 4797 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:37.117210135 +0000 UTC m=+1455.974396834 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts") pod "keystone-fe06-account-create-update-qrx7h" (UID: "8a849ec9-1af1-48d9-bcf6-856ea8904178") : configmap "openstack-scripts" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.622918 4797 projected.go:194] Error preparing data for projected volume kube-api-access-dpdq2 for pod openstack/keystone-fe06-account-create-update-qrx7h: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.622975 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2 podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:37.122958278 +0000 UTC m=+1455.980144987 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-dpdq2" (UniqueName: "kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2") pod "keystone-fe06-account-create-update-qrx7h" (UID: "8a849ec9-1af1-48d9-bcf6-856ea8904178") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.625393 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.697409 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.727771 4797 scope.go:117] "RemoveContainer" containerID="e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.759721 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-dpdq2 operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-fe06-account-create-update-qrx7h" podUID="8a849ec9-1af1-48d9-bcf6-856ea8904178" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.795404 4797 scope.go:117] "RemoveContainer" containerID="0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.795989 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba\": container with ID starting with 0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba not found: ID does not exist" containerID="0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.796191 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba"} err="failed to get container status \"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba\": rpc error: code = NotFound desc = could not find container \"0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba\": container with ID starting with 0cb247f2de7cb29a872829ee4902e7781c115c0920cdd8207ef23d994c53f8ba not found: ID does not exist" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.796216 4797 scope.go:117] "RemoveContainer" containerID="e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e" Jan 04 12:12:36 crc kubenswrapper[4797]: E0104 12:12:36.798435 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e\": container with ID starting with e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e not found: ID does not exist" containerID="e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.798459 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e"} err="failed to get container status \"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e\": rpc error: code = NotFound desc = could not find container \"e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e\": container with ID starting with e5609c44929bbb1531534eb84a24c3a69693d4153f6c19c84027208fc86f743e not found: ID does not exist" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.831530 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts\") pod \"9fc7d678-c699-4348-9654-5290b2d48bd3\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.831588 4797 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.831706 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkdmd\" (UniqueName: \"kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd\") pod \"9fc7d678-c699-4348-9654-5290b2d48bd3\" (UID: \"9fc7d678-c699-4348-9654-5290b2d48bd3\") " Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.832331 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9fc7d678-c699-4348-9654-5290b2d48bd3" (UID: "9fc7d678-c699-4348-9654-5290b2d48bd3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.855203 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd" (OuterVolumeSpecName: "kube-api-access-wkdmd") pod "9fc7d678-c699-4348-9654-5290b2d48bd3" (UID: "9fc7d678-c699-4348-9654-5290b2d48bd3"). InnerVolumeSpecName "kube-api-access-wkdmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.937165 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc7d678-c699-4348-9654-5290b2d48bd3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.937206 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkdmd\" (UniqueName: \"kubernetes.io/projected/9fc7d678-c699-4348-9654-5290b2d48bd3-kube-api-access-wkdmd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.956233 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8lx8k_9ffb9045-87ff-4c59-ac14-5de55b6cd42e/ovn-controller/0.log" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.956291 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k" Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.971259 4797 generic.go:334] "Generic (PLEG): container finished" podID="f904f7de-5407-4427-a82c-e31b26195c0a" containerID="92b755c8a3261041cdba8ae7ff1475f229ccf920b10c9c7daf3c52db8f65c7c8" exitCode=0 Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.971354 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerDied","Data":"92b755c8a3261041cdba8ae7ff1475f229ccf920b10c9c7daf3c52db8f65c7c8"} Jan 04 12:12:36 crc kubenswrapper[4797]: I0104 12:12:36.975182 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="galera" containerID="cri-o://0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" gracePeriod=30 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.014517 4797 generic.go:334] "Generic (PLEG): container finished" podID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerID="90b02fc9419aacb467bc917b82b3cf36fa359aa8f7e3da9a5e9dd0d2acd2ff8b" exitCode=0 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.014584 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerDied","Data":"90b02fc9419aacb467bc917b82b3cf36fa359aa8f7e3da9a5e9dd0d2acd2ff8b"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038054 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038336 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pwh4\" (UniqueName: \"kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038407 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038422 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038440 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038486 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038500 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run\") pod \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\" (UID: \"9ffb9045-87ff-4c59-ac14-5de55b6cd42e\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.038893 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run" (OuterVolumeSpecName: "var-run") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.039136 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.043076 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts" (OuterVolumeSpecName: "scripts") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.043147 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.058226 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4" (OuterVolumeSpecName: "kube-api-access-5pwh4") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "kube-api-access-5pwh4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.072286 4797 generic.go:334] "Generic (PLEG): container finished" podID="c7bef264-130e-4b89-ae25-bff622d12a16" containerID="b3e1a2411d4524d7382c2ff29d5707c5d826d0551904e8a9f2a1500c0fabd198" exitCode=0 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.073064 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerDied","Data":"b3e1a2411d4524d7382c2ff29d5707c5d826d0551904e8a9f2a1500c0fabd198"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.073083 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.099479 4797 generic.go:334] "Generic (PLEG): container finished" podID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerID="12a99c3f2374bcf465e4dea1ca963d10cfbd969365c68cb800d0058e6b3033aa" exitCode=0 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.099567 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerDied","Data":"12a99c3f2374bcf465e4dea1ca963d10cfbd969365c68cb800d0058e6b3033aa"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.107777 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8lx8k_9ffb9045-87ff-4c59-ac14-5de55b6cd42e/ovn-controller/0.log" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.107819 4797 generic.go:334] "Generic (PLEG): container finished" podID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" exitCode=143 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.107898 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k" event={"ID":"9ffb9045-87ff-4c59-ac14-5de55b6cd42e","Type":"ContainerDied","Data":"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.107951 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8lx8k" event={"ID":"9ffb9045-87ff-4c59-ac14-5de55b6cd42e","Type":"ContainerDied","Data":"3bdb7ce84a0316f6b77bcb4876a493653157233c83e3edc1a992d6a330dbccc2"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.107968 4797 scope.go:117] "RemoveContainer" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.108062 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8lx8k" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.112699 4797 generic.go:334] "Generic (PLEG): container finished" podID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" containerID="2e3eef8a595abeb6c08dda235b265327b59e733829fd72a94fa5205605eee82c" exitCode=2 Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.112746 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"35e31bb2-dc54-40fe-843a-6a89d4e91dda","Type":"ContainerDied","Data":"2e3eef8a595abeb6c08dda235b265327b59e733829fd72a94fa5205605eee82c"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.133190 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "9ffb9045-87ff-4c59-ac14-5de55b6cd42e" (UID: "9ffb9045-87ff-4c59-ac14-5de55b6cd42e"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.133914 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.134702 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dg6d4" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.137780 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dg6d4" event={"ID":"9fc7d678-c699-4348-9654-5290b2d48bd3","Type":"ContainerDied","Data":"3c1349e81d2bd05dc409186d5a7a05bc2626752d991d0f0abe3d9a705b3f3fae"} Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.139942 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpdq2\" (UniqueName: \"kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140129 4797 scope.go:117] "RemoveContainer" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6" Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.140199 4797 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.140244 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:38.140231633 +0000 UTC m=+1456.997418332 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts") pod "keystone-fe06-account-create-update-qrx7h" (UID: "8a849ec9-1af1-48d9-bcf6-856ea8904178") : configmap "openstack-scripts" not found Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140252 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140367 4797 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140379 4797 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140388 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140396 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pwh4\" (UniqueName: \"kubernetes.io/projected/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-kube-api-access-5pwh4\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140405 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140412 4797 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.140421 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ffb9045-87ff-4c59-ac14-5de55b6cd42e-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.145503 4797 projected.go:194] Error preparing data for projected volume kube-api-access-dpdq2 for pod openstack/keystone-fe06-account-create-update-qrx7h: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.145565 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2 podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:38.145542974 +0000 UTC m=+1457.002729763 (durationBeforeRetry 1s). 
Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.153226 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6\": container with ID starting with 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 not found: ID does not exist" containerID="69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.153267 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6"} err="failed to get container status \"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6\": rpc error: code = NotFound desc = could not find container \"69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6\": container with ID starting with 69f13a63fb21d8066c8ec36bde13b0119c1c833a51bbecc8d17fa58ffbd5a3e6 not found: ID does not exist"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.159289 4797 generic.go:334] "Generic (PLEG): container finished" podID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerID="b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde" exitCode=0
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.159366 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerDied","Data":"b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde"}
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.166467 4797 generic.go:334] "Generic (PLEG): container finished" podID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerID="195feab2fc5a30a29388929e5db7f342ff1fd19485ed5d54f1b0daa3a22494a5" exitCode=0
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.166531 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerDied","Data":"195feab2fc5a30a29388929e5db7f342ff1fd19485ed5d54f1b0daa3a22494a5"}
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.170691 4797 generic.go:334] "Generic (PLEG): container finished" podID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerID="82da6920fae7841a168988c98103f1142bc6deaa632d2c919def4424335d4556" exitCode=0
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.170751 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerDied","Data":"82da6920fae7841a168988c98103f1142bc6deaa632d2c919def4424335d4556"}
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.173098 4797 generic.go:334] "Generic (PLEG): container finished" podID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerID="c8a43ccc510e54f1604c2a6c7afb923307b8eda1f1c77a645703c76df3b2be10" exitCode=2
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.173165 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-qrx7h"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.173656 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerDied","Data":"c8a43ccc510e54f1604c2a6c7afb923307b8eda1f1c77a645703c76df3b2be10"}
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242609 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242675 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpzws\" (UniqueName: \"kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242727 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242837 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242856 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242890 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.242921 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs\") pod \"f904f7de-5407-4427-a82c-e31b26195c0a\" (UID: \"f904f7de-5407-4427-a82c-e31b26195c0a\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.243556 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs" (OuterVolumeSpecName: "logs") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.247407 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws" (OuterVolumeSpecName: "kube-api-access-mpzws") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "kube-api-access-mpzws". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.254121 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts" (OuterVolumeSpecName: "scripts") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.287545 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-dg6d4"]
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.290590 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-dg6d4"]
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.345858 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f904f7de-5407-4427-a82c-e31b26195c0a-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.345887 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.345902 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpzws\" (UniqueName: \"kubernetes.io/projected/f904f7de-5407-4427-a82c-e31b26195c0a-kube-api-access-mpzws\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.371173 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.371388 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data" (OuterVolumeSpecName: "config-data") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.418485 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-qrx7h"
Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.446582 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.448939 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.449074 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.450679 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.460764 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.460825 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerName="nova-cell1-conductor-conductor"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.462390 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.465155 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.483462 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3571eba4-ffe7-46c9-a3ba-895a81b311a9" path="/var/lib/kubelet/pods/3571eba4-ffe7-46c9-a3ba-895a81b311a9/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.484095 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.484141 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eb4366c-79ee-4bc5-9434-8bf5134c0fe4" path="/var/lib/kubelet/pods/4eb4366c-79ee-4bc5-9434-8bf5134c0fe4/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.484608 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c520b8c-14d2-4c61-b791-7cdbd31b2191" path="/var/lib/kubelet/pods/5c520b8c-14d2-4c61-b791-7cdbd31b2191/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.485655 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5da8a74d-b58c-4960-ac79-9b440f78fe73" path="/var/lib/kubelet/pods/5da8a74d-b58c-4960-ac79-9b440f78fe73/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.486340 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f1faf93-a80e-424a-b37b-0dc4506c5716" path="/var/lib/kubelet/pods/6f1faf93-a80e-424a-b37b-0dc4506c5716/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.487088 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72e6c6e9-97f2-4420-a6b9-92418e78dd60" path="/var/lib/kubelet/pods/72e6c6e9-97f2-4420-a6b9-92418e78dd60/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.488334 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fc7d678-c699-4348-9654-5290b2d48bd3" path="/var/lib/kubelet/pods/9fc7d678-c699-4348-9654-5290b2d48bd3/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.489245 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1480006-db62-4dfd-af3b-c394600f632c" path="/var/lib/kubelet/pods/a1480006-db62-4dfd-af3b-c394600f632c/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.489884 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b242917a-c1a0-4747-a162-d4d13d917682" path="/var/lib/kubelet/pods/b242917a-c1a0-4747-a162-d4d13d917682/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.490394 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6693e0d-f1c0-49c2-8901-4eb6d7d56681" path="/var/lib/kubelet/pods/b6693e0d-f1c0-49c2-8901-4eb6d7d56681/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.490711 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ef2789-7ab1-46b1-852e-5bbb106b4044" path="/var/lib/kubelet/pods/b6ef2789-7ab1-46b1-852e-5bbb106b4044/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.491762 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c044e46d-b79e-4f22-be2d-98408745d63a" path="/var/lib/kubelet/pods/c044e46d-b79e-4f22-be2d-98408745d63a/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.492423 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d941aae2-1ed7-41ee-a2a3-38a23a9de6ff" path="/var/lib/kubelet/pods/d941aae2-1ed7-41ee-a2a3-38a23a9de6ff/volumes"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.518695 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8lx8k"]
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.518799 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8lx8k"]
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.531489 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f904f7de-5407-4427-a82c-e31b26195c0a" (UID: "f904f7de-5407-4427-a82c-e31b26195c0a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.537484 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c675d9b9b-9fg4r"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.538246 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.547810 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549566 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549602 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfw52\" (UniqueName: \"kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52\") pod \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549647 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549669 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs\") pod \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549688 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549796 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle\") pod \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") "
Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549845 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82kzz\" (UniqueName: \"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") "
\"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549885 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549905 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549921 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs\") pod \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\" (UID: \"5208dae4-fade-400c-a1a0-edbb3bf8d3dd\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.549952 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config\") pod \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\" (UID: \"35e31bb2-dc54-40fe-843a-6a89d4e91dda\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.550291 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.550303 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f904f7de-5407-4427-a82c-e31b26195c0a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.552710 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.553472 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs" (OuterVolumeSpecName: "logs") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.553717 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.555682 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz" (OuterVolumeSpecName: "kube-api-access-82kzz") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "kube-api-access-82kzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.559221 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.567256 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts" (OuterVolumeSpecName: "scripts") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.567266 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52" (OuterVolumeSpecName: "kube-api-access-cfw52") pod "35e31bb2-dc54-40fe-843a-6a89d4e91dda" (UID: "35e31bb2-dc54-40fe-843a-6a89d4e91dda"). InnerVolumeSpecName "kube-api-access-cfw52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.602565 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "35e31bb2-dc54-40fe-843a-6a89d4e91dda" (UID: "35e31bb2-dc54-40fe-843a-6a89d4e91dda"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.609015 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35e31bb2-dc54-40fe-843a-6a89d4e91dda" (UID: "35e31bb2-dc54-40fe-843a-6a89d4e91dda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.614299 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.634818 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653703 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653760 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653783 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk28t\" (UniqueName: \"kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653802 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653826 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w4db\" (UniqueName: \"kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db\") pod \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653847 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle\") pod \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653862 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653893 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653907 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653925 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl7lt\" (UniqueName: 
\"kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653947 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653968 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs\") pod \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.653990 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654022 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654049 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs\") pod \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654075 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654095 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654114 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654129 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654143 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts\") pod 
\"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654163 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654206 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28f2w\" (UniqueName: \"kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654226 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654240 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data\") pod \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\" (UID: \"5dd48a0b-cc19-4d03-9c3c-174d89f504c7\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654263 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654297 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs\") pod \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\" (UID: \"ad32d59a-781c-4c96-a9c3-e16c617da9b1\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654313 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654332 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs\") pod \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\" (UID: \"6fd01dbb-d505-4555-a9b2-d9b9334aae11\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654349 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs\") pod \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\" (UID: \"d6d5dd79-6901-4d58-a419-1a6d2e352ab3\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654670 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 
12:12:37.654681 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82kzz\" (UniqueName: \"kubernetes.io/projected/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-kube-api-access-82kzz\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654675 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs" (OuterVolumeSpecName: "logs") pod "5dd48a0b-cc19-4d03-9c3c-174d89f504c7" (UID: "5dd48a0b-cc19-4d03-9c3c-174d89f504c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654690 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654725 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654741 4797 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654769 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654786 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfw52\" (UniqueName: \"kubernetes.io/projected/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-api-access-cfw52\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654799 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654812 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.654826 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.662109 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t" (OuterVolumeSpecName: "kube-api-access-jk28t") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "kube-api-access-jk28t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.662327 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.662947 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.663184 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.664489 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs" (OuterVolumeSpecName: "logs") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.664777 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs" (OuterVolumeSpecName: "logs") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.665048 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs" (OuterVolumeSpecName: "logs") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.676168 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.676192 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt" (OuterVolumeSpecName: "kube-api-access-jl7lt") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "kube-api-access-jl7lt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.677244 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts" (OuterVolumeSpecName: "scripts") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.677418 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w" (OuterVolumeSpecName: "kube-api-access-28f2w") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "kube-api-access-28f2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.685440 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.685754 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db" (OuterVolumeSpecName: "kube-api-access-5w4db") pod "5dd48a0b-cc19-4d03-9c3c-174d89f504c7" (UID: "5dd48a0b-cc19-4d03-9c3c-174d89f504c7"). InnerVolumeSpecName "kube-api-access-5w4db". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.686404 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts" (OuterVolumeSpecName: "scripts") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.756379 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28f2w\" (UniqueName: \"kubernetes.io/projected/6fd01dbb-d505-4555-a9b2-d9b9334aae11-kube-api-access-28f2w\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.761716 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd01dbb-d505-4555-a9b2-d9b9334aae11-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.761807 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.761880 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.761944 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk28t\" (UniqueName: \"kubernetes.io/projected/ad32d59a-781c-4c96-a9c3-e16c617da9b1-kube-api-access-jk28t\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762027 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w4db\" (UniqueName: \"kubernetes.io/projected/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-kube-api-access-5w4db\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762299 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762390 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl7lt\" (UniqueName: \"kubernetes.io/projected/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-kube-api-access-jl7lt\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762531 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762602 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762664 4797 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762721 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762775 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762840 
4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.762920 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad32d59a-781c-4c96-a9c3-e16c617da9b1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.779625 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.785336 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5dd48a0b-cc19-4d03-9c3c-174d89f504c7" (UID: "5dd48a0b-cc19-4d03-9c3c-174d89f504c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.787039 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.797120 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "35e31bb2-dc54-40fe-843a-6a89d4e91dda" (UID: "35e31bb2-dc54-40fe-843a-6a89d4e91dda"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.798176 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.798876 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.812657 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.837256 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data" (OuterVolumeSpecName: "config-data") pod "5208dae4-fade-400c-a1a0-edbb3bf8d3dd" (UID: "5208dae4-fade-400c-a1a0-edbb3bf8d3dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.860146 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data" (OuterVolumeSpecName: "config-data") pod "5dd48a0b-cc19-4d03-9c3c-174d89f504c7" (UID: "5dd48a0b-cc19-4d03-9c3c-174d89f504c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.863697 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.863977 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.864053 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9xdd\" (UniqueName: \"kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.864080 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.864107 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.864304 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs\") pod \"c7bef264-130e-4b89-ae25-bff622d12a16\" (UID: \"c7bef264-130e-4b89-ae25-bff622d12a16\") " Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.864300 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs" (OuterVolumeSpecName: "logs") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865539 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865558 4797 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/35e31bb2-dc54-40fe-843a-6a89d4e91dda-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865568 4797 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7bef264-130e-4b89-ae25-bff622d12a16-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865577 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865585 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865594 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865602 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865610 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.865618 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5208dae4-fade-400c-a1a0-edbb3bf8d3dd-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.868472 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd" (OuterVolumeSpecName: "kube-api-access-l9xdd") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "kube-api-access-l9xdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.884054 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.884724 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.891499 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.895454 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.903400 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.942248 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data" (OuterVolumeSpecName: "config-data") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: W0104 12:12:37.948909 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15365bbd_c14c_41d4_9b66_b6f645749898.slice/crio-dcf708113a249d270b1dbcc2b402e9d110d5ffc9372ef2e99452c081530b99d5 WatchSource:0}: Error finding container dcf708113a249d270b1dbcc2b402e9d110d5ffc9372ef2e99452c081530b99d5: Status 404 returned error can't find the container with id dcf708113a249d270b1dbcc2b402e9d110d5ffc9372ef2e99452c081530b99d5 Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.952386 4797 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:12:37 crc kubenswrapper[4797]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: if [ -n "" ]; then Jan 04 12:12:37 crc kubenswrapper[4797]: GRANT_DATABASE="" Jan 04 12:12:37 crc kubenswrapper[4797]: else Jan 04 12:12:37 crc kubenswrapper[4797]: GRANT_DATABASE="*" Jan 04 12:12:37 crc kubenswrapper[4797]: fi Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: # going for maximum compatibility here: Jan 04 12:12:37 crc kubenswrapper[4797]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:12:37 crc kubenswrapper[4797]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:12:37 crc kubenswrapper[4797]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:12:37 crc kubenswrapper[4797]: # support updates Jan 04 12:12:37 crc kubenswrapper[4797]: Jan 04 12:12:37 crc kubenswrapper[4797]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:12:37 crc kubenswrapper[4797]: E0104 12:12:37.953865 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-pw258" podUID="15365bbd-c14c-41d4-9b66-b6f645749898" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.955948 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5dd48a0b-cc19-4d03-9c3c-174d89f504c7" (UID: "5dd48a0b-cc19-4d03-9c3c-174d89f504c7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.956316 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data" (OuterVolumeSpecName: "config-data") pod "d6d5dd79-6901-4d58-a419-1a6d2e352ab3" (UID: "d6d5dd79-6901-4d58-a419-1a6d2e352ab3"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.960097 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data" (OuterVolumeSpecName: "config-data") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.961239 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data" (OuterVolumeSpecName: "config-data") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.964883 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ad32d59a-781c-4c96-a9c3-e16c617da9b1" (UID: "ad32d59a-781c-4c96-a9c3-e16c617da9b1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968872 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968896 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968906 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9xdd\" (UniqueName: \"kubernetes.io/projected/c7bef264-130e-4b89-ae25-bff622d12a16-kube-api-access-l9xdd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968914 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968923 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968930 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968938 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6d5dd79-6901-4d58-a419-1a6d2e352ab3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968946 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 
crc kubenswrapper[4797]: I0104 12:12:37.968955 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968963 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad32d59a-781c-4c96-a9c3-e16c617da9b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.968971 4797 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dd48a0b-cc19-4d03-9c3c-174d89f504c7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.975637 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6fd01dbb-d505-4555-a9b2-d9b9334aae11" (UID: "6fd01dbb-d505-4555-a9b2-d9b9334aae11"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.976139 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.976176 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:37 crc kubenswrapper[4797]: I0104 12:12:37.984110 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c7bef264-130e-4b89-ae25-bff622d12a16" (UID: "c7bef264-130e-4b89-ae25-bff622d12a16"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.074113 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.074380 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd01dbb-d505-4555-a9b2-d9b9334aae11-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.074391 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.074399 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7bef264-130e-4b89-ae25-bff622d12a16-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.154061 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.175898 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.176323 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpdq2\" (UniqueName: \"kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2\") pod \"keystone-fe06-account-create-update-qrx7h\" (UID: \"8a849ec9-1af1-48d9-bcf6-856ea8904178\") " pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.176727 4797 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.176779 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:40.176761421 +0000 UTC m=+1459.033948140 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts") pod "keystone-fe06-account-create-update-qrx7h" (UID: "8a849ec9-1af1-48d9-bcf6-856ea8904178") : configmap "openstack-scripts" not found Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.184846 4797 projected.go:194] Error preparing data for projected volume kube-api-access-dpdq2 for pod openstack/keystone-fe06-account-create-update-qrx7h: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.184911 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2 podName:8a849ec9-1af1-48d9-bcf6-856ea8904178 nodeName:}" failed. No retries permitted until 2026-01-04 12:12:40.184894823 +0000 UTC m=+1459.042081542 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-dpdq2" (UniqueName: "kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2") pod "keystone-fe06-account-create-update-qrx7h" (UID: "8a849ec9-1af1-48d9-bcf6-856ea8904178") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.189962 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-pw258" event={"ID":"15365bbd-c14c-41d4-9b66-b6f645749898","Type":"ContainerStarted","Data":"dcf708113a249d270b1dbcc2b402e9d110d5ffc9372ef2e99452c081530b99d5"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.193678 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c675d9b9b-9fg4r" event={"ID":"6fd01dbb-d505-4555-a9b2-d9b9334aae11","Type":"ContainerDied","Data":"7eea30a9665ea6482ee0ce5f14ac5320fae71817557119980ebba26d32335361"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.193707 4797 scope.go:117] "RemoveContainer" containerID="90b02fc9419aacb467bc917b82b3cf36fa359aa8f7e3da9a5e9dd0d2acd2ff8b" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.193812 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c675d9b9b-9fg4r" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.212447 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"35e31bb2-dc54-40fe-843a-6a89d4e91dda","Type":"ContainerDied","Data":"98a71f0894e170f51a1fa453465be098592028fea532f5129370897949ccf2aa"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.212518 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.217964 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_78a313c1-13df-4f65-9b14-4d9ee83d637c/ovn-northd/0.log" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.218030 4797 generic.go:334] "Generic (PLEG): container finished" podID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerID="58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" exitCode=139 Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.218077 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerDied","Data":"58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.221233 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5208dae4-fade-400c-a1a0-edbb3bf8d3dd","Type":"ContainerDied","Data":"92fad1304fac4c970c9bdf7a5b6e47169e209fc38555102ec90fa0f047a27435"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.221304 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.249281 4797 generic.go:334] "Generic (PLEG): container finished" podID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerID="1c0fd3972d9964ce21b2d52308680c08c54401c7fccda3b5fdc0b6f2f4fc3b91" exitCode=0 Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.249311 4797 generic.go:334] "Generic (PLEG): container finished" podID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerID="5e95861f05a86916d5bbae50b4b2c7dbc814b737dab7253f6895bdb7c85967b0" exitCode=0 Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.249375 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerDied","Data":"1c0fd3972d9964ce21b2d52308680c08c54401c7fccda3b5fdc0b6f2f4fc3b91"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.249461 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerDied","Data":"5e95861f05a86916d5bbae50b4b2c7dbc814b737dab7253f6895bdb7c85967b0"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.253574 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5dd48a0b-cc19-4d03-9c3c-174d89f504c7","Type":"ContainerDied","Data":"0ac1b0aba7d680563956c694f04467c4de6d473590db594edb1f534eb4764778"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.253674 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.267346 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-596bb9655b-hsz7j" event={"ID":"f904f7de-5407-4427-a82c-e31b26195c0a","Type":"ContainerDied","Data":"9fd443c8b12b79f5170075de4e6b779c43b6cf2d22c4dadf5b76a0ae32eab5ca"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.267455 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-596bb9655b-hsz7j" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.275236 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d6d5dd79-6901-4d58-a419-1a6d2e352ab3","Type":"ContainerDied","Data":"2bef0afc0189b44a53c78411bbab4568c071b7676673fcac3d2cb05957daa757"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.275253 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.281300 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data\") pod \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.281453 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle\") pod \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.281562 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2g5w\" (UniqueName: \"kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w\") pod \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\" (UID: \"970e7570-2ccd-4420-8e1f-70aff6cf2f38\") " Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.292643 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.299953 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.305212 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.305282 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="galera" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.306689 4797 generic.go:334] "Generic (PLEG): container finished" podID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" containerID="2a9eccf9a453b475692f8ed1731c1a41b538844deba6d050f2d590b9042ae5f2" exitCode=0 Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.306789 4797 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"102e7d3d-5368-4d87-ba33-874aeed5eaa9","Type":"ContainerDied","Data":"2a9eccf9a453b475692f8ed1731c1a41b538844deba6d050f2d590b9042ae5f2"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.310969 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w" (OuterVolumeSpecName: "kube-api-access-f2g5w") pod "970e7570-2ccd-4420-8e1f-70aff6cf2f38" (UID: "970e7570-2ccd-4420-8e1f-70aff6cf2f38"). InnerVolumeSpecName "kube-api-access-f2g5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.310978 4797 generic.go:334] "Generic (PLEG): container finished" podID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" exitCode=0 Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.311394 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"970e7570-2ccd-4420-8e1f-70aff6cf2f38","Type":"ContainerDied","Data":"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.311445 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"970e7570-2ccd-4420-8e1f-70aff6cf2f38","Type":"ContainerDied","Data":"c4ba556fc45b42f07a38f347b8726e2718bf48adc4ce15a78f26879fc7b36129"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.311547 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.319266 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7bef264-130e-4b89-ae25-bff622d12a16","Type":"ContainerDied","Data":"b184fed43afcf9d8ca7dc2f24b4ae0105023b2b4e798de2ff00864a697986fab"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.319424 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.330352 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fe06-account-create-update-qrx7h" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.330562 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.330470 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad32d59a-781c-4c96-a9c3-e16c617da9b1","Type":"ContainerDied","Data":"860c56f0ef4f536ba402454ce8600892b210ad29396896e9e1c61e0dcc8a1dc3"} Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.359697 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data" (OuterVolumeSpecName: "config-data") pod "970e7570-2ccd-4420-8e1f-70aff6cf2f38" (UID: "970e7570-2ccd-4420-8e1f-70aff6cf2f38"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.367257 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "970e7570-2ccd-4420-8e1f-70aff6cf2f38" (UID: "970e7570-2ccd-4420-8e1f-70aff6cf2f38"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.385273 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.385303 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2g5w\" (UniqueName: \"kubernetes.io/projected/970e7570-2ccd-4420-8e1f-70aff6cf2f38-kube-api-access-f2g5w\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.385315 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/970e7570-2ccd-4420-8e1f-70aff6cf2f38-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.394183 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.396177 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.398213 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.398272 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerName="nova-scheduler-scheduler" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.435730 4797 scope.go:117] "RemoveContainer" containerID="3e3a15f869a29fb29c3c3b83c8c750fb50a7d3d3675123fde9492287428afb82" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.436592 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.479592 4797 scope.go:117] "RemoveContainer" containerID="2e3eef8a595abeb6c08dda235b265327b59e733829fd72a94fa5205605eee82c" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.487417 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle\") pod \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.487496 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config\") pod \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.487570 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data\") pod \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.487600 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxs9n\" (UniqueName: \"kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n\") pod \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.487627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs\") pod \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\" (UID: \"102e7d3d-5368-4d87-ba33-874aeed5eaa9\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.493397 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.494088 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data" (OuterVolumeSpecName: "config-data") pod "102e7d3d-5368-4d87-ba33-874aeed5eaa9" (UID: "102e7d3d-5368-4d87-ba33-874aeed5eaa9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.494467 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "102e7d3d-5368-4d87-ba33-874aeed5eaa9" (UID: "102e7d3d-5368-4d87-ba33-874aeed5eaa9"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.498038 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.498065 4797 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.518233 4797 scope.go:117] "RemoveContainer" containerID="12a99c3f2374bcf465e4dea1ca963d10cfbd969365c68cb800d0058e6b3033aa" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.518688 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.526176 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n" (OuterVolumeSpecName: "kube-api-access-wxs9n") pod "102e7d3d-5368-4d87-ba33-874aeed5eaa9" (UID: "102e7d3d-5368-4d87-ba33-874aeed5eaa9"). InnerVolumeSpecName "kube-api-access-wxs9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.532072 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "102e7d3d-5368-4d87-ba33-874aeed5eaa9" (UID: "102e7d3d-5368-4d87-ba33-874aeed5eaa9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.533098 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.565182 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.588100 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.601727 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxs9n\" (UniqueName: \"kubernetes.io/projected/102e7d3d-5368-4d87-ba33-874aeed5eaa9-kube-api-access-wxs9n\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.601759 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.601961 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-596bb9655b-hsz7j"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.617109 4797 scope.go:117] "RemoveContainer" containerID="180cd379c89ba5c47f18c6ef1c23999a1a2f27dccb309fa51f167df314999171" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.619206 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-fe06-account-create-update-qrx7h"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.633798 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/keystone-fe06-account-create-update-qrx7h"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.646047 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.653033 4797 scope.go:117] "RemoveContainer" containerID="b087757871433f8a985387e4d7b03ea17d2cbf5af8c97d5436dc453c3de7fbde" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.656551 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7c675d9b9b-9fg4r"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.663564 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.665122 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "102e7d3d-5368-4d87-ba33-874aeed5eaa9" (UID: "102e7d3d-5368-4d87-ba33-874aeed5eaa9"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.683568 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.687953 4797 scope.go:117] "RemoveContainer" containerID="d84880cf79913873bb87aa245c4a430e9d1ba2de7f2b46e8c142037646d023ef" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.688099 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.693207 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-pw258" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.693569 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.699776 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.703118 4797 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/102e7d3d-5368-4d87-ba33-874aeed5eaa9-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.703146 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a849ec9-1af1-48d9-bcf6-856ea8904178-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.703156 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpdq2\" (UniqueName: \"kubernetes.io/projected/8a849ec9-1af1-48d9-bcf6-856ea8904178-kube-api-access-dpdq2\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.705703 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.711244 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.715867 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.719684 4797 scope.go:117] 
"RemoveContainer" containerID="92b755c8a3261041cdba8ae7ff1475f229ccf920b10c9c7daf3c52db8f65c7c8" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.720958 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.728319 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.753208 4797 scope.go:117] "RemoveContainer" containerID="b36b3a17559f71b1c9cfe74bbb4df7e910711b7ff1898a03c491af436d97b4ee" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.777293 4797 scope.go:117] "RemoveContainer" containerID="82da6920fae7841a168988c98103f1142bc6deaa632d2c919def4424335d4556" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.801216 4797 scope.go:117] "RemoveContainer" containerID="9a78a250903e07d598e88432420c861a2b78d03523ca1f130ae64e7cae2c30a0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.804625 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz22f\" (UniqueName: \"kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f\") pod \"15365bbd-c14c-41d4-9b66-b6f645749898\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.804806 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts\") pod \"15365bbd-c14c-41d4-9b66-b6f645749898\" (UID: \"15365bbd-c14c-41d4-9b66-b6f645749898\") " Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.805236 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "15365bbd-c14c-41d4-9b66-b6f645749898" (UID: "15365bbd-c14c-41d4-9b66-b6f645749898"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.808291 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f" (OuterVolumeSpecName: "kube-api-access-lz22f") pod "15365bbd-c14c-41d4-9b66-b6f645749898" (UID: "15365bbd-c14c-41d4-9b66-b6f645749898"). InnerVolumeSpecName "kube-api-access-lz22f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.819239 4797 scope.go:117] "RemoveContainer" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.843231 4797 scope.go:117] "RemoveContainer" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" Jan 04 12:12:38 crc kubenswrapper[4797]: E0104 12:12:38.844009 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2\": container with ID starting with 1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2 not found: ID does not exist" containerID="1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.844054 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2"} err="failed to get container status \"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2\": rpc error: code = NotFound desc = could not find container \"1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2\": container with ID starting with 1c70ecd2fc031df13ccc0156da12098717f071f424765fe04374fcd34b4c3ac2 not found: ID does not exist" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.844079 4797 scope.go:117] "RemoveContainer" containerID="b3e1a2411d4524d7382c2ff29d5707c5d826d0551904e8a9f2a1500c0fabd198" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.865072 4797 scope.go:117] "RemoveContainer" containerID="4ffd1060deed2e4837e76d50f06b6b55d7a3082c39494946544e96c70503bbad" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.888436 4797 scope.go:117] "RemoveContainer" containerID="195feab2fc5a30a29388929e5db7f342ff1fd19485ed5d54f1b0daa3a22494a5" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.897654 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_78a313c1-13df-4f65-9b14-4d9ee83d637c/ovn-northd/0.log" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.897723 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.911100 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz22f\" (UniqueName: \"kubernetes.io/projected/15365bbd-c14c-41d4-9b66-b6f645749898-kube-api-access-lz22f\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.911129 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15365bbd-c14c-41d4-9b66-b6f645749898-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.919937 4797 scope.go:117] "RemoveContainer" containerID="dc4a21b1db6be2e07ca76bdb6c4c04513e5342187c0ece820874f1a86da403bc" Jan 04 12:12:38 crc kubenswrapper[4797]: I0104 12:12:38.982749 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": dial tcp 10.217.0.196:3000: connect: connection refused" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.012334 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtbhg\" (UniqueName: \"kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.012389 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.012414 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.012905 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.012957 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.013032 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.013109 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.013218 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts\") pod \"78a313c1-13df-4f65-9b14-4d9ee83d637c\" (UID: \"78a313c1-13df-4f65-9b14-4d9ee83d637c\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.013273 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config" (OuterVolumeSpecName: "config") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.014138 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.014158 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.015931 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg" (OuterVolumeSpecName: "kube-api-access-wtbhg") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "kube-api-access-wtbhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.013863 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts" (OuterVolumeSpecName: "scripts") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.043895 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.075271 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.088213 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "78a313c1-13df-4f65-9b14-4d9ee83d637c" (UID: "78a313c1-13df-4f65-9b14-4d9ee83d637c"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.115876 4797 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.115909 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78a313c1-13df-4f65-9b14-4d9ee83d637c-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.115919 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtbhg\" (UniqueName: \"kubernetes.io/projected/78a313c1-13df-4f65-9b14-4d9ee83d637c-kube-api-access-wtbhg\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.115929 4797 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.115938 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a313c1-13df-4f65-9b14-4d9ee83d637c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.189538 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.191230 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.192269 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.192309 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerName="nova-cell0-conductor-conductor" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.350252 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.357126 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"102e7d3d-5368-4d87-ba33-874aeed5eaa9","Type":"ContainerDied","Data":"581283018b89c218e4e5ed02a9fb4fa5128f43172ca416163f25d6c14cd70c26"} Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.357159 4797 scope.go:117] "RemoveContainer" containerID="2a9eccf9a453b475692f8ed1731c1a41b538844deba6d050f2d590b9042ae5f2" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.357212 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.376025 4797 generic.go:334] "Generic (PLEG): container finished" podID="c1955fef-0f64-4332-b967-c50875302a97" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" exitCode=0 Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.376068 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.376076 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerDied","Data":"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717"} Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.376643 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1955fef-0f64-4332-b967-c50875302a97","Type":"ContainerDied","Data":"9c5c0c5c57dc1b5326ec40f64bc36adcb8d3e1b7a4c02d8e8e54eab46f6cc4c1"} Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.386513 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_78a313c1-13df-4f65-9b14-4d9ee83d637c/ovn-northd/0.log" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.386589 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"78a313c1-13df-4f65-9b14-4d9ee83d637c","Type":"ContainerDied","Data":"c50bef501c809c20be6a4846aacee0622f1dcd7827f65a9f04cfa86cbfb87b09"} Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.386713 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.395508 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-pw258" event={"ID":"15365bbd-c14c-41d4-9b66-b6f645749898","Type":"ContainerDied","Data":"dcf708113a249d270b1dbcc2b402e9d110d5ffc9372ef2e99452c081530b99d5"} Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.395612 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-pw258" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.402083 4797 scope.go:117] "RemoveContainer" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.403083 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.408456 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426468 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426497 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426517 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426565 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426596 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426638 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xptj2\" (UniqueName: \"kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.426689 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: 
I0104 12:12:39.426710 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated\") pod \"c1955fef-0f64-4332-b967-c50875302a97\" (UID: \"c1955fef-0f64-4332-b967-c50875302a97\") " Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.427054 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.427097 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data podName:1414255a-a94a-4508-aa55-4ad9837afbea nodeName:}" failed. No retries permitted until 2026-01-04 12:12:47.427084251 +0000 UTC m=+1466.284270960 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data") pod "rabbitmq-server-0" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea") : configmap "rabbitmq-config-data" not found Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.427347 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.428112 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.433496 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.434277 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.436739 4797 scope.go:117] "RemoveContainer" containerID="96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.447862 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.455586 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-pw258"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.464167 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.464248 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.478342 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2" (OuterVolumeSpecName: "kube-api-access-xptj2") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "kube-api-access-xptj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.505026 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.508277 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.511595 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "c1955fef-0f64-4332-b967-c50875302a97" (UID: "c1955fef-0f64-4332-b967-c50875302a97"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.517858 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" path="/var/lib/kubelet/pods/102e7d3d-5368-4d87-ba33-874aeed5eaa9/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.518683 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15365bbd-c14c-41d4-9b66-b6f645749898" path="/var/lib/kubelet/pods/15365bbd-c14c-41d4-9b66-b6f645749898/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.519120 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" path="/var/lib/kubelet/pods/35e31bb2-dc54-40fe-843a-6a89d4e91dda/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.519742 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" path="/var/lib/kubelet/pods/5208dae4-fade-400c-a1a0-edbb3bf8d3dd/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.520936 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" path="/var/lib/kubelet/pods/5dd48a0b-cc19-4d03-9c3c-174d89f504c7/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.522052 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" path="/var/lib/kubelet/pods/6fd01dbb-d505-4555-a9b2-d9b9334aae11/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.523059 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" path="/var/lib/kubelet/pods/78a313c1-13df-4f65-9b14-4d9ee83d637c/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.523606 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a849ec9-1af1-48d9-bcf6-856ea8904178" path="/var/lib/kubelet/pods/8a849ec9-1af1-48d9-bcf6-856ea8904178/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.523936 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" path="/var/lib/kubelet/pods/970e7570-2ccd-4420-8e1f-70aff6cf2f38/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.524947 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" path="/var/lib/kubelet/pods/9ffb9045-87ff-4c59-ac14-5de55b6cd42e/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.525665 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" path="/var/lib/kubelet/pods/ad32d59a-781c-4c96-a9c3-e16c617da9b1/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.526333 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" path="/var/lib/kubelet/pods/c7bef264-130e-4b89-ae25-bff622d12a16/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.527366 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" path="/var/lib/kubelet/pods/d6d5dd79-6901-4d58-a419-1a6d2e352ab3/volumes" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.527938 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" path="/var/lib/kubelet/pods/f904f7de-5407-4427-a82c-e31b26195c0a/volumes" 
Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529732 4797 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529765 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529795 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529804 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529813 4797 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1955fef-0f64-4332-b967-c50875302a97-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529822 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xptj2\" (UniqueName: \"kubernetes.io/projected/c1955fef-0f64-4332-b967-c50875302a97-kube-api-access-xptj2\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529832 4797 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1955fef-0f64-4332-b967-c50875302a97-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.529842 4797 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1955fef-0f64-4332-b967-c50875302a97-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.544038 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.572678 4797 scope.go:117] "RemoveContainer" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.573144 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717\": container with ID starting with 0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717 not found: ID does not exist" containerID="0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.573250 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717"} err="failed to get container status \"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717\": rpc error: code = NotFound desc = could not find container \"0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717\": container with ID starting with 
0c740f0c8379d75459da466b50fd09b336e81027db5e20b66e67069dfbcaf717 not found: ID does not exist" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.573310 4797 scope.go:117] "RemoveContainer" containerID="96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444" Jan 04 12:12:39 crc kubenswrapper[4797]: E0104 12:12:39.573664 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444\": container with ID starting with 96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444 not found: ID does not exist" containerID="96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.573740 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444"} err="failed to get container status \"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444\": rpc error: code = NotFound desc = could not find container \"96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444\": container with ID starting with 96aa1acaf5935f2d42da38fa7509740b3bf3ceb39bf99961bf662f2c7bb0f444 not found: ID does not exist" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.573789 4797 scope.go:117] "RemoveContainer" containerID="b2a3aa87e9987ca244304550b8c7cb9d2bc8fd403c42e8b33895b09bf3e9a6f0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.596335 4797 scope.go:117] "RemoveContainer" containerID="58ece0e6e7fe2461330ba3cf72113283e7cd4f2a34ed8c85f8d664e6e1e45d47" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.631459 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.706931 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.707070 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.863768 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940087 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940149 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940194 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940217 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940331 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940367 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940403 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djp9v\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940427 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940464 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940511 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: 
\"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940536 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf\") pod \"1414255a-a94a-4508-aa55-4ad9837afbea\" (UID: \"1414255a-a94a-4508-aa55-4ad9837afbea\") " Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.940821 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.941190 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.941494 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.941726 4797 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.941742 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.941753 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.943358 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info" (OuterVolumeSpecName: "pod-info") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.944357 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v" (OuterVolumeSpecName: "kube-api-access-djp9v") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "kube-api-access-djp9v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.944617 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.944702 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.953267 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.983210 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data" (OuterVolumeSpecName: "config-data") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:39 crc kubenswrapper[4797]: I0104 12:12:39.998858 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf" (OuterVolumeSpecName: "server-conf") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.031197 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1414255a-a94a-4508-aa55-4ad9837afbea" (UID: "1414255a-a94a-4508-aa55-4ad9837afbea"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046039 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046075 4797 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1414255a-a94a-4508-aa55-4ad9837afbea-server-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046106 4797 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1414255a-a94a-4508-aa55-4ad9837afbea-pod-info\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046120 4797 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1414255a-a94a-4508-aa55-4ad9837afbea-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046155 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046167 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046178 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.046189 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djp9v\" (UniqueName: \"kubernetes.io/projected/1414255a-a94a-4508-aa55-4ad9837afbea-kube-api-access-djp9v\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.063627 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.147672 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.147791 4797 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.147868 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data podName:d5065e47-366d-4fc9-9acb-f7691489b27d nodeName:}" failed. No retries permitted until 2026-01-04 12:12:48.147850713 +0000 UTC m=+1467.005037422 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data") pod "rabbitmq-cell1-server-0" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.164486 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.248474 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.248840 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.248898 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.248935 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.249420 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.249467 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.249523 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.249540 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc7sx\" (UniqueName: \"kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx\") pod \"5d40c323-3444-4e84-8eb3-799d343c384d\" (UID: \"5d40c323-3444-4e84-8eb3-799d343c384d\") " Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.252572 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod 
"5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.253167 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts" (OuterVolumeSpecName: "scripts") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.262214 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx" (OuterVolumeSpecName: "kube-api-access-jc7sx") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "kube-api-access-jc7sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.267516 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.301100 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data" (OuterVolumeSpecName: "config-data") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.302140 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.320591 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.329469 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5d40c323-3444-4e84-8eb3-799d343c384d" (UID: "5d40c323-3444-4e84-8eb3-799d343c384d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351735 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351777 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351805 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351815 4797 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351828 4797 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351838 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351852 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d40c323-3444-4e84-8eb3-799d343c384d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.351864 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc7sx\" (UniqueName: \"kubernetes.io/projected/5d40c323-3444-4e84-8eb3-799d343c384d-kube-api-access-jc7sx\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.409322 4797 generic.go:334] "Generic (PLEG): container finished" podID="5d40c323-3444-4e84-8eb3-799d343c384d" containerID="28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf" exitCode=0 Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.409367 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7d545b89c4-j2jcg" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.409367 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d545b89c4-j2jcg" event={"ID":"5d40c323-3444-4e84-8eb3-799d343c384d","Type":"ContainerDied","Data":"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf"} Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.409743 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d545b89c4-j2jcg" event={"ID":"5d40c323-3444-4e84-8eb3-799d343c384d","Type":"ContainerDied","Data":"718348ab379f9dde9db5d5973a417cbdca056f788e7795a70a4572145188e422"} Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.409765 4797 scope.go:117] "RemoveContainer" containerID="28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf" Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.413434 4797 generic.go:334] "Generic (PLEG): container finished" podID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerID="6c2bd4e26c7793a7e6748b52de489f7127e21a2457e862fa1b66701b1c8d40a1" exitCode=0 Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.413499 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerDied","Data":"6c2bd4e26c7793a7e6748b52de489f7127e21a2457e862fa1b66701b1c8d40a1"} Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.416623 4797 generic.go:334] "Generic (PLEG): container finished" podID="1414255a-a94a-4508-aa55-4ad9837afbea" containerID="47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9" exitCode=0 Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.416674 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerDied","Data":"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"} Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.416693 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1414255a-a94a-4508-aa55-4ad9837afbea","Type":"ContainerDied","Data":"6030299dc435f9ffcfd7689657cf8b99ec0c84d025c97d869e16e8efea13d8c9"} Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.416756 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.453334 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.453372 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.453384 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.453397 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.453454 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. 
No retries permitted until 2026-01-04 12:12:48.453434918 +0000 UTC m=+1467.310621627 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.524322 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"]
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.529512 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7d545b89c4-j2jcg"]
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.534734 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.542106 4797 scope.go:117] "RemoveContainer" containerID="28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf"
Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.542439 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf\": container with ID starting with 28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf not found: ID does not exist" containerID="28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.542471 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf"} err="failed to get container status \"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf\": rpc error: code = NotFound desc = could not find container \"28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf\": container with ID starting with 28344c07a0b4e952636b02bdab9ccaa3d85886d81dd56bb71c417b520aa94ccf not found: ID does not exist"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.542490 4797 scope.go:117] "RemoveContainer" containerID="47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.543461 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.624157 4797 scope.go:117] "RemoveContainer" containerID="0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.702495 4797 scope.go:117] "RemoveContainer" containerID="47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"
Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.703245 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9\": container with ID starting with 47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9 not found: ID does not exist" containerID="47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.703280 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9"} err="failed to get container status \"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9\": rpc error: code = NotFound desc = could not find container \"47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9\": container with ID starting with 47d6553d0f53d9f675284625f2804522f5d143e251d64708e89f2631bdebb2c9 not found: ID does not exist"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.703329 4797 scope.go:117] "RemoveContainer" containerID="0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"
Jan 04 12:12:40 crc kubenswrapper[4797]: E0104 12:12:40.703559 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5\": container with ID starting with 0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5 not found: ID does not exist" containerID="0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.703605 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5"} err="failed to get container status \"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5\": rpc error: code = NotFound desc = could not find container \"0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5\": container with ID starting with 0b6b2cb873bb63997fba2a13e1a93a7f96705268825797c8373f3557765085b5 not found: ID does not exist"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.825406 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959503 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959571 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959654 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959670 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph2cq\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959688 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959718 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959734 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.959766 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.960414 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.960473 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.960648 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.960670 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls\") pod \"d5065e47-366d-4fc9-9acb-f7691489b27d\" (UID: \"d5065e47-366d-4fc9-9acb-f7691489b27d\") "
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.960821 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.961187 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.961368 4797 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.961377 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.965088 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info" (OuterVolumeSpecName: "pod-info") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.967936 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.976748 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.979101 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data" (OuterVolumeSpecName: "config-data") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.988559 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq" (OuterVolumeSpecName: "kube-api-access-ph2cq") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "kube-api-access-ph2cq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.988821 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:40 crc kubenswrapper[4797]: I0104 12:12:40.991813 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf" (OuterVolumeSpecName: "server-conf") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.030643 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d5065e47-366d-4fc9-9acb-f7691489b27d" (UID: "d5065e47-366d-4fc9-9acb-f7691489b27d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.063439 4797 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d5065e47-366d-4fc9-9acb-f7691489b27d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.063602 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.066288 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph2cq\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-kube-api-access-ph2cq\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.067740 4797 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-server-conf\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.067880 4797 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d5065e47-366d-4fc9-9acb-f7691489b27d-pod-info\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.068019 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5065e47-366d-4fc9-9acb-f7691489b27d-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.068229 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.068453 4797 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d5065e47-366d-4fc9-9acb-f7691489b27d-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.085900 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.172651 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.211222 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.211595 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.211833 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.211859 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server"
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.221316 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.222302 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.223743 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.223799 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.460399 4797 generic.go:334] "Generic (PLEG): container finished" podID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerID="e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc" exitCode=0
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.460473 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9b79ca4c-dde4-4027-b779-ba762e22cb3a","Type":"ContainerDied","Data":"e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc"}
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.460539 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.468880 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d5065e47-366d-4fc9-9acb-f7691489b27d","Type":"ContainerDied","Data":"04e7c09d94dcf8dd3e32b58f307acc57b1b92bb8f74bb451e4e224e9aacb9123"}
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.468933 4797 scope.go:117] "RemoveContainer" containerID="6c2bd4e26c7793a7e6748b52de489f7127e21a2457e862fa1b66701b1c8d40a1"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.469053 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.472116 4797 generic.go:334] "Generic (PLEG): container finished" podID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9" exitCode=0
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.472178 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"db00660d-8e07-4dd6-80ec-9d85f9902af4","Type":"ContainerDied","Data":"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"}
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.472198 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"db00660d-8e07-4dd6-80ec-9d85f9902af4","Type":"ContainerDied","Data":"7eb6a624be0a174329903f0d6596c15793a68184c45c4572451cea2e7b47e768"}
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.472242 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.481753 4797 generic.go:334] "Generic (PLEG): container finished" podID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerID="4516e73ced3267162e550e03f293cb70e85913d200f7bae5f299b8b55ebea07c" exitCode=0
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.501827 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" path="/var/lib/kubelet/pods/1414255a-a94a-4508-aa55-4ad9837afbea/volumes"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.502392 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d40c323-3444-4e84-8eb3-799d343c384d" path="/var/lib/kubelet/pods/5d40c323-3444-4e84-8eb3-799d343c384d/volumes"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.502944 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1955fef-0f64-4332-b967-c50875302a97" path="/var/lib/kubelet/pods/c1955fef-0f64-4332-b967-c50875302a97/volumes"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.535225 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerDied","Data":"4516e73ced3267162e550e03f293cb70e85913d200f7bae5f299b8b55ebea07c"}
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.545536 4797 scope.go:117] "RemoveContainer" containerID="245e10bca25f08bc626b5aac80b4d7c9c27d64f07c83fb38359bbb8adbb0f904"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.546183 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.549878 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.574886 4797 scope.go:117] "RemoveContainer" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.577522 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data\") pod \"db00660d-8e07-4dd6-80ec-9d85f9902af4\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.577568 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vklgx\" (UniqueName: \"kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx\") pod \"db00660d-8e07-4dd6-80ec-9d85f9902af4\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.577612 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle\") pod \"db00660d-8e07-4dd6-80ec-9d85f9902af4\" (UID: \"db00660d-8e07-4dd6-80ec-9d85f9902af4\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.581027 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx" (OuterVolumeSpecName: "kube-api-access-vklgx") pod "db00660d-8e07-4dd6-80ec-9d85f9902af4" (UID: "db00660d-8e07-4dd6-80ec-9d85f9902af4"). InnerVolumeSpecName "kube-api-access-vklgx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.599980 4797 scope.go:117] "RemoveContainer" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"
Jan 04 12:12:41 crc kubenswrapper[4797]: E0104 12:12:41.600425 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9\": container with ID starting with bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9 not found: ID does not exist" containerID="bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.600453 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9"} err="failed to get container status \"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9\": rpc error: code = NotFound desc = could not find container \"bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9\": container with ID starting with bcb5a5893d285f5cf4ef704ceed9beb89b677d777bbf5c62ef847704f7b4e2d9 not found: ID does not exist"
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.600959 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data" (OuterVolumeSpecName: "config-data") pod "db00660d-8e07-4dd6-80ec-9d85f9902af4" (UID: "db00660d-8e07-4dd6-80ec-9d85f9902af4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.609304 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db00660d-8e07-4dd6-80ec-9d85f9902af4" (UID: "db00660d-8e07-4dd6-80ec-9d85f9902af4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.681743 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.681826 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.681898 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fdjj\" (UniqueName: \"kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj\") pod \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.681984 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682064 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data\") pod \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682105 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682152 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle\") pod \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\" (UID: \"9b79ca4c-dde4-4027-b779-ba762e22cb3a\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682185 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvflq\" (UniqueName: \"kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682224 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682262 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682338 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs\") pod \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\" (UID: \"85b84c4b-e97c-477c-81f1-77ba4a6a4f65\") "
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682745 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682778 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vklgx\" (UniqueName: \"kubernetes.io/projected/db00660d-8e07-4dd6-80ec-9d85f9902af4-kube-api-access-vklgx\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.682799 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db00660d-8e07-4dd6-80ec-9d85f9902af4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.683736 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.683849 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.687172 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq" (OuterVolumeSpecName: "kube-api-access-zvflq") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "kube-api-access-zvflq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.690194 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts" (OuterVolumeSpecName: "scripts") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.690853 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj" (OuterVolumeSpecName: "kube-api-access-2fdjj") pod "9b79ca4c-dde4-4027-b779-ba762e22cb3a" (UID: "9b79ca4c-dde4-4027-b779-ba762e22cb3a"). InnerVolumeSpecName "kube-api-access-2fdjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.701837 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data" (OuterVolumeSpecName: "config-data") pod "9b79ca4c-dde4-4027-b779-ba762e22cb3a" (UID: "9b79ca4c-dde4-4027-b779-ba762e22cb3a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.712950 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.721484 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b79ca4c-dde4-4027-b779-ba762e22cb3a" (UID: "9b79ca4c-dde4-4027-b779-ba762e22cb3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.724636 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.754029 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.775585 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data" (OuterVolumeSpecName: "config-data") pod "85b84c4b-e97c-477c-81f1-77ba4a6a4f65" (UID: "85b84c4b-e97c-477c-81f1-77ba4a6a4f65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784281 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fdjj\" (UniqueName: \"kubernetes.io/projected/9b79ca4c-dde4-4027-b779-ba762e22cb3a-kube-api-access-2fdjj\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784441 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784506 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784667 4797 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784782 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b79ca4c-dde4-4027-b779-ba762e22cb3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784836 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvflq\" (UniqueName: \"kubernetes.io/projected/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-kube-api-access-zvflq\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784890 4797 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.784947 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.785024 4797 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.785080 4797 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.785130 4797 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b84c4b-e97c-477c-81f1-77ba4a6a4f65-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.832455 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 04 12:12:41 crc kubenswrapper[4797]: I0104 12:12:41.832529 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.293553 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c675d9b9b-9fg4r" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.158:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.293808 4797 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c675d9b9b-9fg4r" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.158:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.502028 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"85b84c4b-e97c-477c-81f1-77ba4a6a4f65","Type":"ContainerDied","Data":"5e342d3f9fb9be28dbf8d5bf6129f4af45768faa49c3b73cf8842b9fabc0e09a"}
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.502088 4797 scope.go:117] "RemoveContainer" containerID="1c0fd3972d9964ce21b2d52308680c08c54401c7fccda3b5fdc0b6f2f4fc3b91"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.502138 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.504128 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9b79ca4c-dde4-4027-b779-ba762e22cb3a","Type":"ContainerDied","Data":"0ed070d1deff0ca1cc23363530db2a00a84ad80d8f09f04afc4fc0d1c796f5c8"}
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.504192 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.541340 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.550124 4797 scope.go:117] "RemoveContainer" containerID="c8a43ccc510e54f1604c2a6c7afb923307b8eda1f1c77a645703c76df3b2be10"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.551926 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.559862 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.566099 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.578271 4797 scope.go:117] "RemoveContainer" containerID="4516e73ced3267162e550e03f293cb70e85913d200f7bae5f299b8b55ebea07c"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.618295 4797 scope.go:117] "RemoveContainer" containerID="5e95861f05a86916d5bbae50b4b2c7dbc814b737dab7253f6895bdb7c85967b0"
Jan 04 12:12:42 crc kubenswrapper[4797]: I0104 12:12:42.649337 4797 scope.go:117] "RemoveContainer" containerID="e581e74fe754ab1191dbc157f0a74a4cd81265f22a9501b9d08e15c76d3d4fdc"
Jan 04 12:12:43 crc kubenswrapper[4797]: I0104 12:12:43.483199 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" path="/var/lib/kubelet/pods/85b84c4b-e97c-477c-81f1-77ba4a6a4f65/volumes"
Jan 04 12:12:43 crc kubenswrapper[4797]: I0104 12:12:43.484580 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" path="/var/lib/kubelet/pods/9b79ca4c-dde4-4027-b779-ba762e22cb3a/volumes"
Jan 04 12:12:43 crc kubenswrapper[4797]: I0104 12:12:43.485189 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" path="/var/lib/kubelet/pods/db00660d-8e07-4dd6-80ec-9d85f9902af4/volumes"
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.207468 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.208103 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.208426 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.208449 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server"
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.210541 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.214746 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.222904 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:46 crc kubenswrapper[4797]: E0104 12:12:46.222956 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.102964 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56b94d8bbf-ng2pk"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191593 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191702 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191762 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191788 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhqp2\" (UniqueName: \"kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191838 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.191885 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.192019 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs\") pod \"3ce2926e-ae2f-44db-a48c-08d3df636d05\" (UID: \"3ce2926e-ae2f-44db-a48c-08d3df636d05\") "
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.220387 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2" (OuterVolumeSpecName: "kube-api-access-zhqp2") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "kube-api-access-zhqp2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.227946 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.293398 4797 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-httpd-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.294341 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhqp2\" (UniqueName: \"kubernetes.io/projected/3ce2926e-ae2f-44db-a48c-08d3df636d05-kube-api-access-zhqp2\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.323564 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.323714 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.331606 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.348781 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config" (OuterVolumeSpecName: "config") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.380599 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "3ce2926e-ae2f-44db-a48c-08d3df636d05" (UID: "3ce2926e-ae2f-44db-a48c-08d3df636d05"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.395736 4797 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.395767 4797 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.395775 4797 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.395784 4797 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.395792 4797 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ce2926e-ae2f-44db-a48c-08d3df636d05-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.496514 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.496542 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.496552 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.496562 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.496607 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:04.49659471 +0000 UTC m=+1483.353781419 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.567461 4797 generic.go:334] "Generic (PLEG): container finished" podID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerID="8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7" exitCode=0
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.567525 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerDied","Data":"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"}
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.567576 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b94d8bbf-ng2pk" event={"ID":"3ce2926e-ae2f-44db-a48c-08d3df636d05","Type":"ContainerDied","Data":"3382c8748f8373d8ffc03485bc6663d561e539d92ffa3d5602aefd3623704534"}
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.567577 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56b94d8bbf-ng2pk"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.567605 4797 scope.go:117] "RemoveContainer" containerID="1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.602018 4797 scope.go:117] "RemoveContainer" containerID="8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.620495 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"]
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.637246 4797 scope.go:117] "RemoveContainer" containerID="1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.637819 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423\": container with ID starting with 1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423 not found: ID does not exist" containerID="1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.637860 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423"} err="failed to get container status \"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423\": rpc error: code = NotFound desc = could not find container \"1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423\": container with ID starting with 1b0da46c1b5440859eff68c71313810caea530175cb86a259909ad171ec41423 not found: ID does not exist"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.637885 4797 scope.go:117] "RemoveContainer" containerID="8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"
Jan 04 12:12:48 crc kubenswrapper[4797]: E0104 12:12:48.638265 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7\": container with ID starting with 8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7 not found: ID does not exist" containerID="8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.638298 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7"} err="failed to get container status \"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7\": rpc error: code = NotFound desc = could not find container \"8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7\": container with ID starting with 8085633a38550ac3d8119b312e663b6a287c42e0558d6c6745bf1a450c5b42a7 not found: ID does not exist"
Jan 04 12:12:48 crc kubenswrapper[4797]: I0104 12:12:48.638595 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-56b94d8bbf-ng2pk"]
Jan 04 12:12:49 crc kubenswrapper[4797]: I0104 12:12:49.492624 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" path="/var/lib/kubelet/pods/3ce2926e-ae2f-44db-a48c-08d3df636d05/volumes"
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.207502 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.208497 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.209268 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.209335 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.209408 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server"
Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.212441 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.216127 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:51 crc kubenswrapper[4797]: E0104 12:12:51.216199 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.207708 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.208826 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.209281 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.209339 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.209392 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.212033 4797 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.214279 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:12:56 crc kubenswrapper[4797]: E0104 12:12:56.214315 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.207360 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.208205 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.208598 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.208762 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.208802 4797 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.210366 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot 
register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.211562 4797 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:01 crc kubenswrapper[4797]: E0104 12:13:01.211631 4797 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2ft9n" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:13:03 crc kubenswrapper[4797]: I0104 12:13:03.745776 4797 generic.go:334] "Generic (PLEG): container finished" podID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerID="70f1225e794568121e45dff575941e5d35fc27a1defcff2979df065f4ecbaf37" exitCode=137 Jan 04 12:13:03 crc kubenswrapper[4797]: I0104 12:13:03.746266 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"70f1225e794568121e45dff575941e5d35fc27a1defcff2979df065f4ecbaf37"} Jan 04 12:13:03 crc kubenswrapper[4797]: I0104 12:13:03.754215 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2ft9n_be75b707-995c-4dd4-958a-a7c2b8e4fb4e/ovs-vswitchd/0.log" Jan 04 12:13:03 crc kubenswrapper[4797]: I0104 12:13:03.756101 4797 generic.go:334] "Generic (PLEG): container finished" podID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" exitCode=137 Jan 04 12:13:03 crc kubenswrapper[4797]: I0104 12:13:03.756216 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerDied","Data":"d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5"} Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.139756 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2ft9n_be75b707-995c-4dd4-958a-a7c2b8e4fb4e/ovs-vswitchd/0.log" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.140519 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-2ft9n" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.262945 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263055 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263146 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263182 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263174 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263236 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9tx2\" (UniqueName: \"kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263243 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run" (OuterVolumeSpecName: "var-run") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263392 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts\") pod \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\" (UID: \"be75b707-995c-4dd4-958a-a7c2b8e4fb4e\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263275 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib" (OuterVolumeSpecName: "var-lib") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "var-lib". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263284 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log" (OuterVolumeSpecName: "var-log") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263866 4797 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263910 4797 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-lib\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263935 4797 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.263954 4797 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-var-log\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.264610 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts" (OuterVolumeSpecName: "scripts") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.270487 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2" (OuterVolumeSpecName: "kube-api-access-z9tx2") pod "be75b707-995c-4dd4-958a-a7c2b8e4fb4e" (UID: "be75b707-995c-4dd4-958a-a7c2b8e4fb4e"). InnerVolumeSpecName "kube-api-access-z9tx2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.364788 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9tx2\" (UniqueName: \"kubernetes.io/projected/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-kube-api-access-z9tx2\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.364825 4797 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be75b707-995c-4dd4-958a-a7c2b8e4fb4e-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:04 crc kubenswrapper[4797]: E0104 12:13:04.567539 4797 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Jan 04 12:13:04 crc kubenswrapper[4797]: E0104 12:13:04.567584 4797 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Jan 04 12:13:04 crc kubenswrapper[4797]: E0104 12:13:04.567596 4797 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:13:04 crc kubenswrapper[4797]: E0104 12:13:04.567609 4797 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:13:04 crc kubenswrapper[4797]: E0104 12:13:04.567835 4797 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift podName:78ea6800-bdfe-4593-8aad-7aaba5be8897 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:36.567806109 +0000 UTC m=+1515.424992818 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift") pod "swift-storage-0" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.766841 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2ft9n_be75b707-995c-4dd4-958a-a7c2b8e4fb4e/ovs-vswitchd/0.log" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.768024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2ft9n" event={"ID":"be75b707-995c-4dd4-958a-a7c2b8e4fb4e","Type":"ContainerDied","Data":"e97a6a189846df8453b6f473dcf31d0ba9db2f9777cf92d8280d68b02b850d5d"} Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.768073 4797 scope.go:117] "RemoveContainer" containerID="d6d61798a92bf1b413c3b88a5465b98cab4ba0d46e2d0fefbc494a2c592413a5" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.768095 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-2ft9n" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.775688 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"78ea6800-bdfe-4593-8aad-7aaba5be8897","Type":"ContainerDied","Data":"c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15"} Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.775725 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3a97e796c6fe1dcf5c18556d7c80309446e0e490a57eb558857403ba6d41e15" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.802701 4797 scope.go:117] "RemoveContainer" containerID="07c65fe822043677786148411dac469d9c1fed3e5cdee2ec14f6fbdde82f2093" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.810169 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.827524 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"] Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.834522 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-2ft9n"] Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.836260 4797 scope.go:117] "RemoveContainer" containerID="13b3c1f89d1638121f9ff2d3c0347d60458aa80a5677df698bfe3f584120c7fe" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.978767 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp4w5\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5\") pod \"78ea6800-bdfe-4593-8aad-7aaba5be8897\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.978878 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache\") pod \"78ea6800-bdfe-4593-8aad-7aaba5be8897\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.978913 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock\") pod \"78ea6800-bdfe-4593-8aad-7aaba5be8897\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.978942 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"78ea6800-bdfe-4593-8aad-7aaba5be8897\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.979067 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") pod \"78ea6800-bdfe-4593-8aad-7aaba5be8897\" (UID: \"78ea6800-bdfe-4593-8aad-7aaba5be8897\") " Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.979565 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock" (OuterVolumeSpecName: "lock") pod "78ea6800-bdfe-4593-8aad-7aaba5be8897" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.979714 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache" (OuterVolumeSpecName: "cache") pod "78ea6800-bdfe-4593-8aad-7aaba5be8897" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.992202 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "swift") pod "78ea6800-bdfe-4593-8aad-7aaba5be8897" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.994232 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "78ea6800-bdfe-4593-8aad-7aaba5be8897" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:04 crc kubenswrapper[4797]: I0104 12:13:04.998205 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5" (OuterVolumeSpecName: "kube-api-access-bp4w5") pod "78ea6800-bdfe-4593-8aad-7aaba5be8897" (UID: "78ea6800-bdfe-4593-8aad-7aaba5be8897"). InnerVolumeSpecName "kube-api-access-bp4w5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.080288 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp4w5\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-kube-api-access-bp4w5\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.080338 4797 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-cache\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.080353 4797 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/78ea6800-bdfe-4593-8aad-7aaba5be8897-lock\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.080392 4797 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.080406 4797 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/78ea6800-bdfe-4593-8aad-7aaba5be8897-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.104320 4797 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.181377 4797 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:05 
crc kubenswrapper[4797]: I0104 12:13:05.517695 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" path="/var/lib/kubelet/pods/be75b707-995c-4dd4-958a-a7c2b8e4fb4e/volumes" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.789776 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.831055 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:13:05 crc kubenswrapper[4797]: I0104 12:13:05.843144 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:13:07 crc kubenswrapper[4797]: I0104 12:13:07.489170 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" path="/var/lib/kubelet/pods/78ea6800-bdfe-4593-8aad-7aaba5be8897/volumes" Jan 04 12:13:11 crc kubenswrapper[4797]: I0104 12:13:11.534816 4797 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podd5065e47-366d-4fc9-9acb-f7691489b27d"] err="unable to destroy cgroup paths for cgroup [kubepods burstable podd5065e47-366d-4fc9-9acb-f7691489b27d] : Timed out while waiting for systemd to remove kubepods-burstable-podd5065e47_366d_4fc9_9acb_f7691489b27d.slice" Jan 04 12:13:11 crc kubenswrapper[4797]: E0104 12:13:11.535217 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable podd5065e47-366d-4fc9-9acb-f7691489b27d] : unable to destroy cgroup paths for cgroup [kubepods burstable podd5065e47-366d-4fc9-9acb-f7691489b27d] : Timed out while waiting for systemd to remove kubepods-burstable-podd5065e47_366d_4fc9_9acb_f7691489b27d.slice" pod="openstack/rabbitmq-cell1-server-0" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" Jan 04 12:13:11 crc kubenswrapper[4797]: I0104 12:13:11.845234 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:13:11 crc kubenswrapper[4797]: I0104 12:13:11.872122 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:13:11 crc kubenswrapper[4797]: I0104 12:13:11.888109 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:13:13 crc kubenswrapper[4797]: I0104 12:13:13.491664 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" path="/var/lib/kubelet/pods/d5065e47-366d-4fc9-9acb-f7691489b27d/volumes" Jan 04 12:13:49 crc kubenswrapper[4797]: I0104 12:13:49.494803 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:13:49 crc kubenswrapper[4797]: I0104 12:13:49.500241 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:14:19 crc kubenswrapper[4797]: I0104 12:14:19.493188 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:14:19 crc kubenswrapper[4797]: I0104 12:14:19.494058 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.440626 4797 scope.go:117] "RemoveContainer" containerID="e72f8d57673f1f31265b970246c597d52e4aa05a77c81c39ae84eb7621a5af98" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.465831 4797 scope.go:117] "RemoveContainer" containerID="d3af461d9e11838f802cfe0a7de35c8ace652dcb15039a7ab0301617ec90e7f2" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.522363 4797 scope.go:117] "RemoveContainer" containerID="95970be1d420d961aa5faba4feae52bf2847295f0bff54ad0ab5ecfc128f1139" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.556170 4797 scope.go:117] "RemoveContainer" containerID="2ddf43a128cda79487c737e8b808ccee520d60c4184785332b0d190c687e82c4" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.586594 4797 scope.go:117] "RemoveContainer" containerID="a6a89f4ea5c21966bdee177b3823cf6d0aaad824fafa561c29a66e6bd00694b7" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.612093 4797 scope.go:117] "RemoveContainer" containerID="f92a49d431c52702876723a17e42bd64b37bc0d9cd0421a5b6941ec41e2dc6b7" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.641194 4797 scope.go:117] "RemoveContainer" containerID="c9d82f755fa7d5c278d143cd78882857725e2a0d5842a8e491e85b49ed482d22" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.665095 4797 scope.go:117] "RemoveContainer" 
containerID="04143dacb56865e0ec29514b982ee5edd9082a91bbf7f0fdd465db07b6bc6e9c" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.692229 4797 scope.go:117] "RemoveContainer" containerID="4bd8a0778e29226183846c723b3b1dbfb7b51f65f48a65e13a57c6521dc2d967" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.720138 4797 scope.go:117] "RemoveContainer" containerID="d29ca7741994258ece164501fcffe7239b50963d2ad0c1de905c698fc6ee8679" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.743764 4797 scope.go:117] "RemoveContainer" containerID="65c274a2548c417a91f46662d6419264a20263e62649a7b5f1180ed7e25e6e5e" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.775153 4797 scope.go:117] "RemoveContainer" containerID="99d34e3c79b062431c9d84a0e920a2cea64a5e8ddf3dd8c6b4b199964f36fd85" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.802311 4797 scope.go:117] "RemoveContainer" containerID="c4b06a39de7f8fcb4a0aabdb535223544b26a0c5df8ee9344130d0f22a2c7aa2" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.827232 4797 scope.go:117] "RemoveContainer" containerID="9d917f5809cfdb7d3e5560e391457ae4c770a5118b5655d11d4280c6634d5e65" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.857427 4797 scope.go:117] "RemoveContainer" containerID="f4fe31bebac4759860a28ee2a6a704b01148a21eef8e356261b41ddb81be35c1" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.893741 4797 scope.go:117] "RemoveContainer" containerID="79f59fca969a408ad9a7a89258e348f5ad676b2c4c9925be24dca40d1cff80a8" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.919322 4797 scope.go:117] "RemoveContainer" containerID="c893edf3b97be1c8c7f521e8f94c429b5040ef1eb7fdfb089bb26d03f0b7689d" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.945489 4797 scope.go:117] "RemoveContainer" containerID="5f3a604178468117d07b94420a105b1d9dc97d1da1e7ba0ee92010cef552b3f1" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.963105 4797 scope.go:117] "RemoveContainer" containerID="dac8160cf70dc4b847fed968eb4544e73150e080abe8ddac5ed8f69951612687" Jan 04 12:14:23 crc kubenswrapper[4797]: I0104 12:14:23.980937 4797 scope.go:117] "RemoveContainer" containerID="74fc952a13313899a93f6ee90b9e6169f5faee6cabe0dffd0615ff5e9f2d1024" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.004097 4797 scope.go:117] "RemoveContainer" containerID="983ab368fd1a7cc5f480857b432d0a3e9e25e3576b0af3b3338d273ed9c26b17" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.025794 4797 scope.go:117] "RemoveContainer" containerID="d94828be090a9c92168c4bc7043848fc90bb1e6e64c5c3b90561fa797b2111c6" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.091132 4797 scope.go:117] "RemoveContainer" containerID="ab503fd970b32577760bbe5ec35f3c0df3a184059f20a5bf6e5b7c34bf2d9638" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.115570 4797 scope.go:117] "RemoveContainer" containerID="4d40f334d1ddce402a853dae1624ea09bc54b4ce1eb605752f43dee829d250b6" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.136316 4797 scope.go:117] "RemoveContainer" containerID="6434cfdff0caacbdc87686c3fccdc177be05f4195a31c1b2f6bbbfe1abc8fc18" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.156771 4797 scope.go:117] "RemoveContainer" containerID="186a9d18142f1a946ca7ae7698d3d92717cd8c9dba8656f7d9054ca1fcf3d306" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.186252 4797 scope.go:117] "RemoveContainer" containerID="4d0acf805d72a971037b409b90ec01a6ebbc5d590e92c591c625d2fa707db99c" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.209914 4797 
scope.go:117] "RemoveContainer" containerID="2706f017a019b29eb56e1869f628c26c7a0403d0ac794f7452caa2d980a884e3" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.228370 4797 scope.go:117] "RemoveContainer" containerID="8e5d7c350df979ff40b9c3dcb14ce546947dc7c0a8d44e33037dd6c84c3f15fe" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.248841 4797 scope.go:117] "RemoveContainer" containerID="2b82ee45235bd2492b8f0fbdeb7b02c90e80f762e2a643a80221e2da952c67c8" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.264760 4797 scope.go:117] "RemoveContainer" containerID="3878ed80aaf358279c48ab4d9c9c529e7175864a9d22b44343cee0f365760367" Jan 04 12:14:24 crc kubenswrapper[4797]: I0104 12:14:24.291682 4797 scope.go:117] "RemoveContainer" containerID="9d2278141a7532207d8e73b992de20ebd9c40d4061f0a7d49e36b0f7ad19b5c7" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.492610 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.494078 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.494178 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.495261 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.495400 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" gracePeriod=600 Jan 04 12:14:49 crc kubenswrapper[4797]: E0104 12:14:49.634496 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.873641 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" exitCode=0 Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.873756 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" 
event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2"} Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.873884 4797 scope.go:117] "RemoveContainer" containerID="c10ebbc3d16697443d4182ec94a3c6cd5f1a67864e3eeebeca580c7d14426666" Jan 04 12:14:49 crc kubenswrapper[4797]: I0104 12:14:49.874816 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:14:49 crc kubenswrapper[4797]: E0104 12:14:49.875377 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.878459 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"] Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879448 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879469 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879485 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879497 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879525 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879539 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879556 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879568 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879584 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-updater" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879596 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-updater" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879609 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="openstack-network-exporter" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879808 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" 
containerName="openstack-network-exporter" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879827 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879842 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879859 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerName="nova-scheduler-scheduler" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879871 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerName="nova-scheduler-scheduler" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879890 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server-init" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879902 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server-init" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879915 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="swift-recon-cron" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879927 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="swift-recon-cron" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879941 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-server" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.879954 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-server" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.879978 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="mysql-bootstrap" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880021 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="mysql-bootstrap" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880039 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880051 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880065 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-updater" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880076 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-updater" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880130 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="proxy-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880142 4797 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="proxy-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880161 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-central-agent" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880174 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-central-agent" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880187 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880200 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880215 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880229 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880245 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="galera" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880258 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="galera" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880278 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880291 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880305 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-reaper" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880316 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-reaper" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880330 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-expirer" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880342 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-expirer" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880360 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerName="nova-cell0-conductor-conductor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880373 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerName="nova-cell0-conductor-conductor" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880390 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="setup-container" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880403 4797 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="setup-container" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880418 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" containerName="kube-state-metrics" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880432 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" containerName="kube-state-metrics" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880446 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880460 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880482 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="ovn-northd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880494 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="ovn-northd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880513 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880526 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880542 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="rabbitmq" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880557 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="rabbitmq" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880572 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerName="nova-cell1-conductor-conductor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880584 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerName="nova-cell1-conductor-conductor" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880607 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="rabbitmq" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880618 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="rabbitmq" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880635 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880647 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880668 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880681 4797 
state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880704 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880715 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880735 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880746 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880767 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880779 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880799 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880811 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880831 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880844 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880856 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880868 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880885 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880897 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880912 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" containerName="memcached" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880924 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" containerName="memcached" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880947 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.880959 4797 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.880978 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-server" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881018 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-server" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881038 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-notification-agent" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881050 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-notification-agent" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881065 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d40c323-3444-4e84-8eb3-799d343c384d" containerName="keystone-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881077 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d40c323-3444-4e84-8eb3-799d343c384d" containerName="keystone-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881096 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881107 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-api" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881119 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="sg-core" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881131 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="sg-core" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881146 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-server" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881157 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-server" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881176 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881188 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-log" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881206 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="rsync" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881218 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="rsync" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881233 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881245 4797 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881266 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="setup-container" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881277 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="setup-container" Jan 04 12:14:59 crc kubenswrapper[4797]: E0104 12:14:59.881298 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881310 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881538 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-expirer" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881562 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="102e7d3d-5368-4d87-ba33-874aeed5eaa9" containerName="memcached" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881585 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-server" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881602 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="rsync" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881622 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881640 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881660 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-httpd" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881678 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d40c323-3444-4e84-8eb3-799d343c384d" containerName="keystone-api" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881691 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api-log" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881707 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b79ca4c-dde4-4027-b779-ba762e22cb3a" containerName="nova-scheduler-scheduler" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881722 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-replicator" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881735 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-auditor" Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881749 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-updater" Jan 04 12:14:59 crc 
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881761 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-log"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881778 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="970e7570-2ccd-4420-8e1f-70aff6cf2f38" containerName="nova-cell1-conductor-conductor"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881799 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovsdb-server"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881816 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6d5dd79-6901-4d58-a419-1a6d2e352ab3" containerName="cinder-api"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881834 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-httpd"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881845 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="db00660d-8e07-4dd6-80ec-9d85f9902af4" containerName="nova-cell0-conductor-conductor"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881866 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dd48a0b-cc19-4d03-9c3c-174d89f504c7" containerName="nova-metadata-metadata"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881884 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-replicator"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881899 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-reaper"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881915 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-api"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881936 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="be75b707-995c-4dd4-958a-a7c2b8e4fb4e" containerName="ovs-vswitchd"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881953 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f904f7de-5407-4427-a82c-e31b26195c0a" containerName="placement-log"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881966 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ffb9045-87ff-4c59-ac14-5de55b6cd42e" containerName="ovn-controller"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.881985 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-httpd"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882027 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-api"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882047 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5065e47-366d-4fc9-9acb-f7691489b27d" containerName="rabbitmq"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882064 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="sg-core"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882083 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-updater"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882098 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="proxy-httpd"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882120 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="swift-recon-cron"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882136 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="1414255a-a94a-4508-aa55-4ad9837afbea" containerName="rabbitmq"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882152 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="ovn-northd"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882169 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7bef264-130e-4b89-ae25-bff622d12a16" containerName="nova-api-log"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882182 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="account-server"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882198 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="5208dae4-fade-400c-a1a0-edbb3bf8d3dd" containerName="glance-log"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882215 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad32d59a-781c-4c96-a9c3-e16c617da9b1" containerName="glance-log"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882227 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e31bb2-dc54-40fe-843a-6a89d4e91dda" containerName="kube-state-metrics"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882245 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-replicator"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882263 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ce2926e-ae2f-44db-a48c-08d3df636d05" containerName="neutron-api"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882277 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a313c1-13df-4f65-9b14-4d9ee83d637c" containerName="openstack-network-exporter"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882290 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="container-auditor"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882309 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-central-agent"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882330 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd01dbb-d505-4555-a9b2-d9b9334aae11" containerName="barbican-api"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882353 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ea6800-bdfe-4593-8aad-7aaba5be8897" containerName="object-server"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882369 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b84c4b-e97c-477c-81f1-77ba4a6a4f65" containerName="ceilometer-notification-agent"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.882384 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1955fef-0f64-4332-b967-c50875302a97" containerName="galera"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.884257 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:14:59 crc kubenswrapper[4797]: I0104 12:14:59.913618 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"]
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.052832 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.052934 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.053172 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsr7c\" (UniqueName: \"kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.151041 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"]
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.152414 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.155158 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsr7c\" (UniqueName: \"kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.155293 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.155361 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.156534 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.156753 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.161572 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.161602 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.163872 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"]
Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.186949 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsr7c\" (UniqueName: \"kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c\") pod \"redhat-marketplace-tdmzm\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") " pod="openshift-marketplace/redhat-marketplace-tdmzm"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tdmzm" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.257885 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.257948 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.258059 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdxbq\" (UniqueName: \"kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.359092 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdxbq\" (UniqueName: \"kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.359227 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.359261 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.360376 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.367495 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 
12:15:00.378208 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdxbq\" (UniqueName: \"kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq\") pod \"collect-profiles-29458815-nqzcw\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.478838 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.709138 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"] Jan 04 12:15:00 crc kubenswrapper[4797]: I0104 12:15:00.927269 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"] Jan 04 12:15:00 crc kubenswrapper[4797]: W0104 12:15:00.963930 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod392d2062_b718_4eaf_a87d_9d990d0dfc0e.slice/crio-54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad WatchSource:0}: Error finding container 54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad: Status 404 returned error can't find the container with id 54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad Jan 04 12:15:01 crc kubenswrapper[4797]: I0104 12:15:01.023347 4797 generic.go:334] "Generic (PLEG): container finished" podID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerID="6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789" exitCode=0 Jan 04 12:15:01 crc kubenswrapper[4797]: I0104 12:15:01.023410 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerDied","Data":"6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789"} Jan 04 12:15:01 crc kubenswrapper[4797]: I0104 12:15:01.023475 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerStarted","Data":"e2eb13e1db0043cced6f9564be6788ed487cc6b8756ed40c47deb9d8814476d3"} Jan 04 12:15:01 crc kubenswrapper[4797]: I0104 12:15:01.025798 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" event={"ID":"392d2062-b718-4eaf-a87d-9d990d0dfc0e","Type":"ContainerStarted","Data":"54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad"} Jan 04 12:15:02 crc kubenswrapper[4797]: I0104 12:15:02.050760 4797 generic.go:334] "Generic (PLEG): container finished" podID="392d2062-b718-4eaf-a87d-9d990d0dfc0e" containerID="d54a12406a5fa13f931a1ad03f4b0c3e8ec9df1c6d8b5aa76c97d4da8e059ca4" exitCode=0 Jan 04 12:15:02 crc kubenswrapper[4797]: I0104 12:15:02.050847 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" event={"ID":"392d2062-b718-4eaf-a87d-9d990d0dfc0e","Type":"ContainerDied","Data":"d54a12406a5fa13f931a1ad03f4b0c3e8ec9df1c6d8b5aa76c97d4da8e059ca4"} Jan 04 12:15:02 crc kubenswrapper[4797]: I0104 12:15:02.474650 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:15:02 crc 
Jan 04 12:15:02 crc kubenswrapper[4797]: E0104 12:15:02.475152 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6"
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.067814 4797 generic.go:334] "Generic (PLEG): container finished" podID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerID="c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2" exitCode=0
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.067893 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerDied","Data":"c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2"}
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.388415 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.506627 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume\") pod \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") "
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.506712 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdxbq\" (UniqueName: \"kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq\") pod \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") "
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.507960 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume" (OuterVolumeSpecName: "config-volume") pod "392d2062-b718-4eaf-a87d-9d990d0dfc0e" (UID: "392d2062-b718-4eaf-a87d-9d990d0dfc0e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.508078 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume\") pod \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\" (UID: \"392d2062-b718-4eaf-a87d-9d990d0dfc0e\") "
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.508533 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/392d2062-b718-4eaf-a87d-9d990d0dfc0e-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.516526 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "392d2062-b718-4eaf-a87d-9d990d0dfc0e" (UID: "392d2062-b718-4eaf-a87d-9d990d0dfc0e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.518297 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq" (OuterVolumeSpecName: "kube-api-access-bdxbq") pod "392d2062-b718-4eaf-a87d-9d990d0dfc0e" (UID: "392d2062-b718-4eaf-a87d-9d990d0dfc0e"). InnerVolumeSpecName "kube-api-access-bdxbq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.609838 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdxbq\" (UniqueName: \"kubernetes.io/projected/392d2062-b718-4eaf-a87d-9d990d0dfc0e-kube-api-access-bdxbq\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:03 crc kubenswrapper[4797]: I0104 12:15:03.610113 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/392d2062-b718-4eaf-a87d-9d990d0dfc0e-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:04 crc kubenswrapper[4797]: I0104 12:15:04.079453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw" event={"ID":"392d2062-b718-4eaf-a87d-9d990d0dfc0e","Type":"ContainerDied","Data":"54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad"}
Jan 04 12:15:04 crc kubenswrapper[4797]: I0104 12:15:04.079790 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54287b360b2e1e61b8af028756ed9120b106c52fc601d90f74847a1c0cb1d2ad"
Jan 04 12:15:04 crc kubenswrapper[4797]: I0104 12:15:04.079874 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"
Jan 04 12:15:04 crc kubenswrapper[4797]: I0104 12:15:04.082383 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerStarted","Data":"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512"}
Jan 04 12:15:04 crc kubenswrapper[4797]: I0104 12:15:04.106570 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tdmzm" podStartSLOduration=2.6272814330000003 podStartE2EDuration="5.106551993s" podCreationTimestamp="2026-01-04 12:14:59 +0000 UTC" firstStartedPulling="2026-01-04 12:15:01.02748703 +0000 UTC m=+1599.884673739" lastFinishedPulling="2026-01-04 12:15:03.50675754 +0000 UTC m=+1602.363944299" observedRunningTime="2026-01-04 12:15:04.099054723 +0000 UTC m=+1602.956241452" watchObservedRunningTime="2026-01-04 12:15:04.106551993 +0000 UTC m=+1602.963738702"
Jan 04 12:15:10 crc kubenswrapper[4797]: I0104 12:15:10.231436 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:10 crc kubenswrapper[4797]: I0104 12:15:10.232140 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tdmzm"
Jan 04 12:15:10 crc kubenswrapper[4797]: I0104 12:15:10.308765 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tdmzm"
pod="openshift-marketplace/redhat-marketplace-tdmzm" Jan 04 12:15:11 crc kubenswrapper[4797]: I0104 12:15:11.282588 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"] Jan 04 12:15:13 crc kubenswrapper[4797]: I0104 12:15:13.173418 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tdmzm" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="registry-server" containerID="cri-o://c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512" gracePeriod=2 Jan 04 12:15:13 crc kubenswrapper[4797]: I0104 12:15:13.474527 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:15:13 crc kubenswrapper[4797]: E0104 12:15:13.475604 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.170606 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tdmzm" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.191499 4797 generic.go:334] "Generic (PLEG): container finished" podID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerID="c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512" exitCode=0 Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.191557 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerDied","Data":"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512"} Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.191593 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tdmzm" event={"ID":"dbc530c5-1f8a-42dc-ab5c-6b834f965719","Type":"ContainerDied","Data":"e2eb13e1db0043cced6f9564be6788ed487cc6b8756ed40c47deb9d8814476d3"} Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.191621 4797 scope.go:117] "RemoveContainer" containerID="c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.191787 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tdmzm" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.222587 4797 scope.go:117] "RemoveContainer" containerID="c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.244978 4797 scope.go:117] "RemoveContainer" containerID="6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.267330 4797 scope.go:117] "RemoveContainer" containerID="c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512" Jan 04 12:15:14 crc kubenswrapper[4797]: E0104 12:15:14.268104 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512\": container with ID starting with c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512 not found: ID does not exist" containerID="c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.268177 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512"} err="failed to get container status \"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512\": rpc error: code = NotFound desc = could not find container \"c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512\": container with ID starting with c624d4bbf363a8dc234dfe9bdecfff5827ba0d24ca7014d5da87c83f2a6f2512 not found: ID does not exist" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.268217 4797 scope.go:117] "RemoveContainer" containerID="c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2" Jan 04 12:15:14 crc kubenswrapper[4797]: E0104 12:15:14.268699 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2\": container with ID starting with c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2 not found: ID does not exist" containerID="c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.268740 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2"} err="failed to get container status \"c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2\": rpc error: code = NotFound desc = could not find container \"c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2\": container with ID starting with c3b032381662b888e57f6760188d50d198df7426866d0ebd6de43d4a8f45dbd2 not found: ID does not exist" Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.268766 4797 scope.go:117] "RemoveContainer" containerID="6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789" Jan 04 12:15:14 crc kubenswrapper[4797]: E0104 12:15:14.269370 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789\": container with ID starting with 6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789 not found: ID does not exist" containerID="6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789" 
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.269422 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789"} err="failed to get container status \"6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789\": rpc error: code = NotFound desc = could not find container \"6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789\": container with ID starting with 6be364fa4a028ebd828e90928381be0b54bc415e71e9a1c2cde5f5098e556789 not found: ID does not exist"
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.279414 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content\") pod \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") "
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.279571 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities\") pod \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") "
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.279623 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsr7c\" (UniqueName: \"kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c\") pod \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\" (UID: \"dbc530c5-1f8a-42dc-ab5c-6b834f965719\") "
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.280552 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities" (OuterVolumeSpecName: "utilities") pod "dbc530c5-1f8a-42dc-ab5c-6b834f965719" (UID: "dbc530c5-1f8a-42dc-ab5c-6b834f965719"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.288550 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c" (OuterVolumeSpecName: "kube-api-access-lsr7c") pod "dbc530c5-1f8a-42dc-ab5c-6b834f965719" (UID: "dbc530c5-1f8a-42dc-ab5c-6b834f965719"). InnerVolumeSpecName "kube-api-access-lsr7c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.309527 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbc530c5-1f8a-42dc-ab5c-6b834f965719" (UID: "dbc530c5-1f8a-42dc-ab5c-6b834f965719"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.381220 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.381253 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsr7c\" (UniqueName: \"kubernetes.io/projected/dbc530c5-1f8a-42dc-ab5c-6b834f965719-kube-api-access-lsr7c\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.381264 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbc530c5-1f8a-42dc-ab5c-6b834f965719-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.533474 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"]
Jan 04 12:15:14 crc kubenswrapper[4797]: I0104 12:15:14.544637 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tdmzm"]
Jan 04 12:15:15 crc kubenswrapper[4797]: I0104 12:15:15.485304 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" path="/var/lib/kubelet/pods/dbc530c5-1f8a-42dc-ab5c-6b834f965719/volumes"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.580658 4797 scope.go:117] "RemoveContainer" containerID="f525d2ac4c5fe5db4a1eec40b42d8797edd3badd36e5653dd0a4e99c191a329c"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.623935 4797 scope.go:117] "RemoveContainer" containerID="3d32769fbc3a11b048b5223b877e645906938f8e70f5f45524f14c66a9518161"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.662316 4797 scope.go:117] "RemoveContainer" containerID="02ec9f1e17cb5f669f96a93b638752b2ab4a42af575488b118e2ccb8752faf9a"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.739629 4797 scope.go:117] "RemoveContainer" containerID="31c529a7d82420563a356ef0183a956a8b578eb086f36f052c1f35e27b1aed25"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.780738 4797 scope.go:117] "RemoveContainer" containerID="6096170da8024bd185ccbb449e4c936cf1c7b826fb7c745391b53796803d1ba7"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.835351 4797 scope.go:117] "RemoveContainer" containerID="b2f03c2d81b3bc4e7e06115c067fe0fe16ce918dfcb1eb0ed143a632062a64db"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.862744 4797 scope.go:117] "RemoveContainer" containerID="b6e4f4c9fc24c93108e20d30e67e1eacd46dba1b62cf51fbfafb10e51011ab52"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.884463 4797 scope.go:117] "RemoveContainer" containerID="2f71e905c6bd19bf3b24560c7b2912b9eb5d025c169894224f61b0bd8ce66f2e"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.902114 4797 scope.go:117] "RemoveContainer" containerID="8b6da3f0a29ef4c778dc8fb64cc5a3f05fdd14f45969bc51a6a39c1ee76a3e94"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.924499 4797 scope.go:117] "RemoveContainer" containerID="10f06d60e44f21d7b74181cb279dbd06be17a5955d943a61e439c5ecc67b13b1"
Jan 04 12:15:24 crc kubenswrapper[4797]: I0104 12:15:24.950155 4797 scope.go:117] "RemoveContainer" containerID="39cd871a2cdd33c13df6a2d820e492302dd31e6705fb1d98b69638efa2161416"
containerID="70f1225e794568121e45dff575941e5d35fc27a1defcff2979df065f4ecbaf37" Jan 04 12:15:27 crc kubenswrapper[4797]: I0104 12:15:27.475833 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:15:27 crc kubenswrapper[4797]: E0104 12:15:27.476813 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.007595 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:33 crc kubenswrapper[4797]: E0104 12:15:33.015204 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="registry-server" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.015636 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="registry-server" Jan 04 12:15:33 crc kubenswrapper[4797]: E0104 12:15:33.015870 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="extract-utilities" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.016073 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="extract-utilities" Jan 04 12:15:33 crc kubenswrapper[4797]: E0104 12:15:33.016293 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="extract-content" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.016451 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="extract-content" Jan 04 12:15:33 crc kubenswrapper[4797]: E0104 12:15:33.016652 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="392d2062-b718-4eaf-a87d-9d990d0dfc0e" containerName="collect-profiles" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.016818 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="392d2062-b718-4eaf-a87d-9d990d0dfc0e" containerName="collect-profiles" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.017342 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc530c5-1f8a-42dc-ab5c-6b834f965719" containerName="registry-server" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.017379 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="392d2062-b718-4eaf-a87d-9d990d0dfc0e" containerName="collect-profiles" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.018581 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.020498 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.099220 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkkfj\" (UniqueName: \"kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.099327 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.099357 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.201122 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.201198 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.201327 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkkfj\" (UniqueName: \"kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.201894 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.205294 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.226696 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gkkfj\" (UniqueName: \"kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj\") pod \"community-operators-sd7wr\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.355444 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:33 crc kubenswrapper[4797]: I0104 12:15:33.663479 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:34 crc kubenswrapper[4797]: I0104 12:15:34.384179 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa6efc2-869a-4903-9129-5106406df350" containerID="5a0aeb24b6f28a1cfc62412c70618876f6fce2dc7990352f77860b18c2dd5d60" exitCode=0 Jan 04 12:15:34 crc kubenswrapper[4797]: I0104 12:15:34.384278 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerDied","Data":"5a0aeb24b6f28a1cfc62412c70618876f6fce2dc7990352f77860b18c2dd5d60"} Jan 04 12:15:34 crc kubenswrapper[4797]: I0104 12:15:34.384628 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerStarted","Data":"c3c317a141d39f12923c7ad9cc5e58ab4553dbca59cf6b4205ffe8a6e36f163c"} Jan 04 12:15:35 crc kubenswrapper[4797]: I0104 12:15:35.397526 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerStarted","Data":"17c0b419b1524cfbdd4d0a57c865554e00e1bc27c5d53df5bd8f3f5543414903"} Jan 04 12:15:36 crc kubenswrapper[4797]: I0104 12:15:36.411368 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa6efc2-869a-4903-9129-5106406df350" containerID="17c0b419b1524cfbdd4d0a57c865554e00e1bc27c5d53df5bd8f3f5543414903" exitCode=0 Jan 04 12:15:36 crc kubenswrapper[4797]: I0104 12:15:36.411464 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerDied","Data":"17c0b419b1524cfbdd4d0a57c865554e00e1bc27c5d53df5bd8f3f5543414903"} Jan 04 12:15:37 crc kubenswrapper[4797]: I0104 12:15:37.430876 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerStarted","Data":"136d740584a32f4f0ba8a764195b204d2ff9fcceb418c124137ba4c2bb0b61ca"} Jan 04 12:15:37 crc kubenswrapper[4797]: I0104 12:15:37.465217 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sd7wr" podStartSLOduration=2.9333933979999998 podStartE2EDuration="5.465184217s" podCreationTimestamp="2026-01-04 12:15:32 +0000 UTC" firstStartedPulling="2026-01-04 12:15:34.38716036 +0000 UTC m=+1633.244347109" lastFinishedPulling="2026-01-04 12:15:36.918951179 +0000 UTC m=+1635.776137928" observedRunningTime="2026-01-04 12:15:37.461853492 +0000 UTC m=+1636.319040241" watchObservedRunningTime="2026-01-04 12:15:37.465184217 +0000 UTC m=+1636.322370966" Jan 04 12:15:39 crc kubenswrapper[4797]: I0104 12:15:39.475763 4797 scope.go:117] "RemoveContainer" 
containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:15:39 crc kubenswrapper[4797]: E0104 12:15:39.476216 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:15:43 crc kubenswrapper[4797]: I0104 12:15:43.356082 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:43 crc kubenswrapper[4797]: I0104 12:15:43.356916 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:43 crc kubenswrapper[4797]: I0104 12:15:43.433435 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:43 crc kubenswrapper[4797]: I0104 12:15:43.578895 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:43 crc kubenswrapper[4797]: I0104 12:15:43.682722 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:45 crc kubenswrapper[4797]: I0104 12:15:45.523697 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sd7wr" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="registry-server" containerID="cri-o://136d740584a32f4f0ba8a764195b204d2ff9fcceb418c124137ba4c2bb0b61ca" gracePeriod=2 Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.546180 4797 generic.go:334] "Generic (PLEG): container finished" podID="8fa6efc2-869a-4903-9129-5106406df350" containerID="136d740584a32f4f0ba8a764195b204d2ff9fcceb418c124137ba4c2bb0b61ca" exitCode=0 Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.546267 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerDied","Data":"136d740584a32f4f0ba8a764195b204d2ff9fcceb418c124137ba4c2bb0b61ca"} Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.787933 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.810083 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkkfj\" (UniqueName: \"kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj\") pod \"8fa6efc2-869a-4903-9129-5106406df350\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.810610 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content\") pod \"8fa6efc2-869a-4903-9129-5106406df350\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.810692 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities\") pod \"8fa6efc2-869a-4903-9129-5106406df350\" (UID: \"8fa6efc2-869a-4903-9129-5106406df350\") " Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.812790 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities" (OuterVolumeSpecName: "utilities") pod "8fa6efc2-869a-4903-9129-5106406df350" (UID: "8fa6efc2-869a-4903-9129-5106406df350"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.876345 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj" (OuterVolumeSpecName: "kube-api-access-gkkfj") pod "8fa6efc2-869a-4903-9129-5106406df350" (UID: "8fa6efc2-869a-4903-9129-5106406df350"). InnerVolumeSpecName "kube-api-access-gkkfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.888698 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8fa6efc2-869a-4903-9129-5106406df350" (UID: "8fa6efc2-869a-4903-9129-5106406df350"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.912508 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.912729 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fa6efc2-869a-4903-9129-5106406df350-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:15:46 crc kubenswrapper[4797]: I0104 12:15:46.912847 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkkfj\" (UniqueName: \"kubernetes.io/projected/8fa6efc2-869a-4903-9129-5106406df350-kube-api-access-gkkfj\") on node \"crc\" DevicePath \"\"" Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.560914 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sd7wr" event={"ID":"8fa6efc2-869a-4903-9129-5106406df350","Type":"ContainerDied","Data":"c3c317a141d39f12923c7ad9cc5e58ab4553dbca59cf6b4205ffe8a6e36f163c"} Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.561040 4797 scope.go:117] "RemoveContainer" containerID="136d740584a32f4f0ba8a764195b204d2ff9fcceb418c124137ba4c2bb0b61ca" Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.561061 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sd7wr" Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.607637 4797 scope.go:117] "RemoveContainer" containerID="17c0b419b1524cfbdd4d0a57c865554e00e1bc27c5d53df5bd8f3f5543414903" Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.641016 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.649297 4797 scope.go:117] "RemoveContainer" containerID="5a0aeb24b6f28a1cfc62412c70618876f6fce2dc7990352f77860b18c2dd5d60" Jan 04 12:15:47 crc kubenswrapper[4797]: I0104 12:15:47.652899 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sd7wr"] Jan 04 12:15:49 crc kubenswrapper[4797]: I0104 12:15:49.488981 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fa6efc2-869a-4903-9129-5106406df350" path="/var/lib/kubelet/pods/8fa6efc2-869a-4903-9129-5106406df350/volumes" Jan 04 12:15:53 crc kubenswrapper[4797]: I0104 12:15:53.475868 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:15:53 crc kubenswrapper[4797]: E0104 12:15:53.476875 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:16:05 crc kubenswrapper[4797]: I0104 12:16:05.474649 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:16:05 crc kubenswrapper[4797]: E0104 12:16:05.475323 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:16:16 crc kubenswrapper[4797]: I0104 12:16:16.474477 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:16:16 crc kubenswrapper[4797]: E0104 12:16:16.475948 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.206204 4797 scope.go:117] "RemoveContainer" containerID="b817e46fd4e00fefeca15f786375e9089980050aceb8fad138a6fef75a80c940" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.239715 4797 scope.go:117] "RemoveContainer" containerID="c24b57917dd5aa7e5ce4a5adee1907c75b34a49d88fc6eb6757c983dec5cfd3a" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.269037 4797 scope.go:117] "RemoveContainer" containerID="e8f552014f76bc33708fe1e5bfac2986ac1ddb10ea417de60dbeeffb11f80a95" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.307672 4797 scope.go:117] "RemoveContainer" containerID="41c17a83276d3c0e069dd494d2af4a5622ebd15603f8ccc377515d895cb90413" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.327206 4797 scope.go:117] "RemoveContainer" containerID="a95bc6ae38195a799c2eec6d7e3b1fc38748ec8ad711d67378d10d8eb96a34a6" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.355480 4797 scope.go:117] "RemoveContainer" containerID="cc5cbdf1d8378431ace1897d3cb28372fb4bfa6af945687726bc9081a3f8a25e" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.382854 4797 scope.go:117] "RemoveContainer" containerID="117875a5dbf3fa596fd25719a04e4362dc7c701dac7b932c7f73f7d2f7984393" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.410663 4797 scope.go:117] "RemoveContainer" containerID="ec3131d9785fd559466d8e3cfc18069ec035951436574a4a4e09ab51a6f9b604" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.431525 4797 scope.go:117] "RemoveContainer" containerID="4de439556b1a7f8f1a32a117874611f6001902f3c668cb6709d4d266700b3fee" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.475510 4797 scope.go:117] "RemoveContainer" containerID="cda8ef357d1d23977cac785ecf306bc2c2c9ad45f4365b9a32302df789764ffb" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.507184 4797 scope.go:117] "RemoveContainer" containerID="959c3fabe5eb13450f485c5aa1971f64bf108f7cbc0ac7321eca9fbcad43737f" Jan 04 12:16:25 crc kubenswrapper[4797]: I0104 12:16:25.535596 4797 scope.go:117] "RemoveContainer" containerID="c848b4f27d4bb207ec3d3f60c0c0cbc9f2de9c8bbbe894359e2672858f039bfa" Jan 04 12:16:30 crc kubenswrapper[4797]: I0104 12:16:30.474922 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:16:30 crc kubenswrapper[4797]: E0104 12:16:30.475824 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:16:41 crc kubenswrapper[4797]: I0104 12:16:41.481282 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:16:41 crc kubenswrapper[4797]: E0104 12:16:41.482527 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:16:52 crc kubenswrapper[4797]: I0104 12:16:52.473911 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:16:52 crc kubenswrapper[4797]: E0104 12:16:52.474894 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:17:05 crc kubenswrapper[4797]: I0104 12:17:05.475178 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:17:05 crc kubenswrapper[4797]: E0104 12:17:05.476115 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:17:19 crc kubenswrapper[4797]: I0104 12:17:19.474685 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:17:19 crc kubenswrapper[4797]: E0104 12:17:19.475603 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:17:25 crc kubenswrapper[4797]: I0104 12:17:25.780582 4797 scope.go:117] "RemoveContainer" containerID="a7c056981801443d31bb93252636ab9e089edd5eeabc8686d37b7b291f4eac9c" Jan 04 12:17:25 crc kubenswrapper[4797]: I0104 12:17:25.847568 4797 scope.go:117] "RemoveContainer" containerID="c5681af799eef3f20aaf548557caaad36b40bee661be1cbc39d9809e2d98ecb9" Jan 04 12:17:25 crc kubenswrapper[4797]: I0104 12:17:25.899034 4797 scope.go:117] "RemoveContainer" containerID="e9781d8b11b2059d23370987bd4fe4ba9a84feaaa9d8282c33746602aff56cd0" Jan 04 12:17:25 crc kubenswrapper[4797]: I0104 12:17:25.950342 4797 
scope.go:117] "RemoveContainer" containerID="cb7bf4291a031834da82973f2a48f5bc74729925017712ac6f2b09e3b4134c5a" Jan 04 12:17:25 crc kubenswrapper[4797]: I0104 12:17:25.972323 4797 scope.go:117] "RemoveContainer" containerID="5a8f25d330d8d93b94b4f89f5ab02f3d9ff1f469e845e6f615fa4dfc0aa9ad40" Jan 04 12:17:34 crc kubenswrapper[4797]: I0104 12:17:34.474188 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:17:34 crc kubenswrapper[4797]: E0104 12:17:34.474930 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:17:47 crc kubenswrapper[4797]: I0104 12:17:47.474456 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:17:47 crc kubenswrapper[4797]: E0104 12:17:47.475438 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:18:00 crc kubenswrapper[4797]: I0104 12:18:00.474604 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:18:00 crc kubenswrapper[4797]: E0104 12:18:00.475530 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:18:12 crc kubenswrapper[4797]: I0104 12:18:12.473629 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:18:12 crc kubenswrapper[4797]: E0104 12:18:12.474689 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:18:24 crc kubenswrapper[4797]: I0104 12:18:24.474366 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:18:24 crc kubenswrapper[4797]: E0104 12:18:24.475382 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:18:26 crc kubenswrapper[4797]: I0104 12:18:26.079731 4797 scope.go:117] "RemoveContainer" containerID="d42b3b430ecab50d28d72871ff41ee2a8381b2e11c2e8f2a5b95be60179f555e" Jan 04 12:18:38 crc kubenswrapper[4797]: I0104 12:18:38.474477 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:18:38 crc kubenswrapper[4797]: E0104 12:18:38.476615 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:18:49 crc kubenswrapper[4797]: I0104 12:18:49.474117 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:18:49 crc kubenswrapper[4797]: E0104 12:18:49.474869 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:19:00 crc kubenswrapper[4797]: I0104 12:19:00.474755 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:19:00 crc kubenswrapper[4797]: E0104 12:19:00.475929 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:19:14 crc kubenswrapper[4797]: I0104 12:19:14.474539 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:19:14 crc kubenswrapper[4797]: E0104 12:19:14.476085 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:19:26 crc kubenswrapper[4797]: I0104 12:19:26.475174 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:19:26 crc kubenswrapper[4797]: E0104 12:19:26.476158 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:19:39 crc kubenswrapper[4797]: I0104 12:19:39.475533 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:19:39 crc kubenswrapper[4797]: E0104 12:19:39.476788 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:19:50 crc kubenswrapper[4797]: I0104 12:19:50.474440 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:19:50 crc kubenswrapper[4797]: I0104 12:19:50.774946 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7"} Jan 04 12:22:19 crc kubenswrapper[4797]: I0104 12:22:19.492764 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:22:19 crc kubenswrapper[4797]: I0104 12:22:19.493652 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:22:49 crc kubenswrapper[4797]: I0104 12:22:49.493506 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:22:49 crc kubenswrapper[4797]: I0104 12:22:49.494165 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.142373 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:12 crc kubenswrapper[4797]: E0104 12:23:12.143271 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="extract-utilities" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.143286 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="extract-utilities" Jan 04 12:23:12 crc kubenswrapper[4797]: E0104 12:23:12.143299 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa6efc2-869a-4903-9129-5106406df350" 
containerName="registry-server" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.143306 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="registry-server" Jan 04 12:23:12 crc kubenswrapper[4797]: E0104 12:23:12.143336 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="extract-content" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.143344 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="extract-content" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.143507 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fa6efc2-869a-4903-9129-5106406df350" containerName="registry-server" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.144633 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.168221 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.224314 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bckfg\" (UniqueName: \"kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.224386 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.224442 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.325554 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bckfg\" (UniqueName: \"kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.325605 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.325646 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " 
pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.326256 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.326523 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.360427 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bckfg\" (UniqueName: \"kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg\") pod \"redhat-operators-vc4zw\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.471655 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:12 crc kubenswrapper[4797]: I0104 12:23:12.970329 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:13 crc kubenswrapper[4797]: I0104 12:23:13.621062 4797 generic.go:334] "Generic (PLEG): container finished" podID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerID="1dc3324f0038a1b9ab758ff79d8e2f778c8749aa78b7fe1cc9d89ad57f2e0f7b" exitCode=0 Jan 04 12:23:13 crc kubenswrapper[4797]: I0104 12:23:13.621206 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerDied","Data":"1dc3324f0038a1b9ab758ff79d8e2f778c8749aa78b7fe1cc9d89ad57f2e0f7b"} Jan 04 12:23:13 crc kubenswrapper[4797]: I0104 12:23:13.621442 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerStarted","Data":"ab015ea2489465efc6d5618b47afcaf34bc66b7e1621ecbc5f8f3557eb69f804"} Jan 04 12:23:13 crc kubenswrapper[4797]: I0104 12:23:13.623322 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:23:14 crc kubenswrapper[4797]: I0104 12:23:14.628372 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerStarted","Data":"7523b06f46e20a0a7d3f29b8011ff7df3cead79b893145910fa2c822e9564ef1"} Jan 04 12:23:15 crc kubenswrapper[4797]: I0104 12:23:15.641445 4797 generic.go:334] "Generic (PLEG): container finished" podID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerID="7523b06f46e20a0a7d3f29b8011ff7df3cead79b893145910fa2c822e9564ef1" exitCode=0 Jan 04 12:23:15 crc kubenswrapper[4797]: I0104 12:23:15.641728 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerDied","Data":"7523b06f46e20a0a7d3f29b8011ff7df3cead79b893145910fa2c822e9564ef1"} Jan 04 12:23:16 crc kubenswrapper[4797]: 
I0104 12:23:16.651081 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerStarted","Data":"48b1c6b7b47b051228fb9fbecbf5d71ed7a1e87302a728fca1f2684a7b5303fd"} Jan 04 12:23:16 crc kubenswrapper[4797]: I0104 12:23:16.687047 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vc4zw" podStartSLOduration=2.205755964 podStartE2EDuration="4.687026953s" podCreationTimestamp="2026-01-04 12:23:12 +0000 UTC" firstStartedPulling="2026-01-04 12:23:13.623102555 +0000 UTC m=+2092.480289264" lastFinishedPulling="2026-01-04 12:23:16.104373504 +0000 UTC m=+2094.961560253" observedRunningTime="2026-01-04 12:23:16.67814366 +0000 UTC m=+2095.535330379" watchObservedRunningTime="2026-01-04 12:23:16.687026953 +0000 UTC m=+2095.544213662" Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.493028 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.493374 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.493425 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.494132 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.494229 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7" gracePeriod=600 Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.680315 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7" exitCode=0 Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.680426 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7"} Jan 04 12:23:19 crc kubenswrapper[4797]: I0104 12:23:19.680652 4797 scope.go:117] "RemoveContainer" containerID="8d15c3d2e2805dfaae1da004b3fcbbf0f747ff67ea9e790db5f22c58cbf078c2" Jan 04 12:23:20 crc kubenswrapper[4797]: I0104 12:23:20.690121 4797 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3"} Jan 04 12:23:22 crc kubenswrapper[4797]: I0104 12:23:22.472540 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:22 crc kubenswrapper[4797]: I0104 12:23:22.473087 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:22 crc kubenswrapper[4797]: I0104 12:23:22.515950 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:22 crc kubenswrapper[4797]: I0104 12:23:22.747087 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:22 crc kubenswrapper[4797]: I0104 12:23:22.799076 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:24 crc kubenswrapper[4797]: I0104 12:23:24.718591 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vc4zw" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="registry-server" containerID="cri-o://48b1c6b7b47b051228fb9fbecbf5d71ed7a1e87302a728fca1f2684a7b5303fd" gracePeriod=2 Jan 04 12:23:27 crc kubenswrapper[4797]: I0104 12:23:27.746136 4797 generic.go:334] "Generic (PLEG): container finished" podID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerID="48b1c6b7b47b051228fb9fbecbf5d71ed7a1e87302a728fca1f2684a7b5303fd" exitCode=0 Jan 04 12:23:27 crc kubenswrapper[4797]: I0104 12:23:27.746185 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerDied","Data":"48b1c6b7b47b051228fb9fbecbf5d71ed7a1e87302a728fca1f2684a7b5303fd"} Jan 04 12:23:27 crc kubenswrapper[4797]: I0104 12:23:27.939222 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.059483 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bckfg\" (UniqueName: \"kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg\") pod \"ed6ce25e-5f27-4500-8889-105bb0e2e259\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.059565 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content\") pod \"ed6ce25e-5f27-4500-8889-105bb0e2e259\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.059595 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities\") pod \"ed6ce25e-5f27-4500-8889-105bb0e2e259\" (UID: \"ed6ce25e-5f27-4500-8889-105bb0e2e259\") " Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.060757 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities" (OuterVolumeSpecName: "utilities") pod "ed6ce25e-5f27-4500-8889-105bb0e2e259" (UID: "ed6ce25e-5f27-4500-8889-105bb0e2e259"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.065917 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg" (OuterVolumeSpecName: "kube-api-access-bckfg") pod "ed6ce25e-5f27-4500-8889-105bb0e2e259" (UID: "ed6ce25e-5f27-4500-8889-105bb0e2e259"). InnerVolumeSpecName "kube-api-access-bckfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.161179 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.161234 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bckfg\" (UniqueName: \"kubernetes.io/projected/ed6ce25e-5f27-4500-8889-105bb0e2e259-kube-api-access-bckfg\") on node \"crc\" DevicePath \"\"" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.225303 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed6ce25e-5f27-4500-8889-105bb0e2e259" (UID: "ed6ce25e-5f27-4500-8889-105bb0e2e259"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.262200 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6ce25e-5f27-4500-8889-105bb0e2e259-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.756020 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vc4zw" event={"ID":"ed6ce25e-5f27-4500-8889-105bb0e2e259","Type":"ContainerDied","Data":"ab015ea2489465efc6d5618b47afcaf34bc66b7e1621ecbc5f8f3557eb69f804"} Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.756131 4797 scope.go:117] "RemoveContainer" containerID="48b1c6b7b47b051228fb9fbecbf5d71ed7a1e87302a728fca1f2684a7b5303fd" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.756064 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vc4zw" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.780805 4797 scope.go:117] "RemoveContainer" containerID="7523b06f46e20a0a7d3f29b8011ff7df3cead79b893145910fa2c822e9564ef1" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.817260 4797 scope.go:117] "RemoveContainer" containerID="1dc3324f0038a1b9ab758ff79d8e2f778c8749aa78b7fe1cc9d89ad57f2e0f7b" Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.821433 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:28 crc kubenswrapper[4797]: I0104 12:23:28.833772 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vc4zw"] Jan 04 12:23:29 crc kubenswrapper[4797]: I0104 12:23:29.492494 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" path="/var/lib/kubelet/pods/ed6ce25e-5f27-4500-8889-105bb0e2e259/volumes" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.887509 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:23:50 crc kubenswrapper[4797]: E0104 12:23:50.888609 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="registry-server" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.888624 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="registry-server" Jan 04 12:23:50 crc kubenswrapper[4797]: E0104 12:23:50.888636 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="extract-content" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.888643 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="extract-content" Jan 04 12:23:50 crc kubenswrapper[4797]: E0104 12:23:50.888661 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="extract-utilities" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.888668 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="extract-utilities" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.888875 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6ce25e-5f27-4500-8889-105bb0e2e259" containerName="registry-server" Jan 04 12:23:50 crc 
kubenswrapper[4797]: I0104 12:23:50.890107 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:50 crc kubenswrapper[4797]: I0104 12:23:50.907658 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.032239 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssvmm\" (UniqueName: \"kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.032608 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.032806 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.135682 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.136149 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.137146 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.136455 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.137770 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssvmm\" (UniqueName: \"kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 
04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.168837 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssvmm\" (UniqueName: \"kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm\") pod \"certified-operators-m825c\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.231675 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.694524 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.992186 4797 generic.go:334] "Generic (PLEG): container finished" podID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerID="167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2" exitCode=0 Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.992225 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerDied","Data":"167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2"} Jan 04 12:23:51 crc kubenswrapper[4797]: I0104 12:23:51.992249 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerStarted","Data":"6c5b4dcf66519d0b24681cf7126ce7d86c1ee4cca9298307aa6ca96b8f0d40d6"} Jan 04 12:23:53 crc kubenswrapper[4797]: I0104 12:23:53.005519 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerStarted","Data":"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd"} Jan 04 12:23:54 crc kubenswrapper[4797]: I0104 12:23:54.017814 4797 generic.go:334] "Generic (PLEG): container finished" podID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerID="508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd" exitCode=0 Jan 04 12:23:54 crc kubenswrapper[4797]: I0104 12:23:54.018089 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerDied","Data":"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd"} Jan 04 12:23:55 crc kubenswrapper[4797]: I0104 12:23:55.030482 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerStarted","Data":"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05"} Jan 04 12:23:55 crc kubenswrapper[4797]: I0104 12:23:55.057369 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m825c" podStartSLOduration=2.542753352 podStartE2EDuration="5.057342905s" podCreationTimestamp="2026-01-04 12:23:50 +0000 UTC" firstStartedPulling="2026-01-04 12:23:51.993774016 +0000 UTC m=+2130.850960725" lastFinishedPulling="2026-01-04 12:23:54.508363559 +0000 UTC m=+2133.365550278" observedRunningTime="2026-01-04 12:23:55.051952304 +0000 UTC m=+2133.909139053" watchObservedRunningTime="2026-01-04 12:23:55.057342905 +0000 UTC m=+2133.914529624" Jan 04 12:24:01 crc 
kubenswrapper[4797]: I0104 12:24:01.232392 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:01 crc kubenswrapper[4797]: I0104 12:24:01.233346 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:01 crc kubenswrapper[4797]: I0104 12:24:01.310448 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:02 crc kubenswrapper[4797]: I0104 12:24:02.174497 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:02 crc kubenswrapper[4797]: I0104 12:24:02.250569 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:24:04 crc kubenswrapper[4797]: I0104 12:24:04.111616 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m825c" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="registry-server" containerID="cri-o://35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05" gracePeriod=2 Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.025827 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.120571 4797 generic.go:334] "Generic (PLEG): container finished" podID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerID="35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05" exitCode=0 Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.120611 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerDied","Data":"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05"} Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.120642 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m825c" event={"ID":"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d","Type":"ContainerDied","Data":"6c5b4dcf66519d0b24681cf7126ce7d86c1ee4cca9298307aa6ca96b8f0d40d6"} Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.120658 4797 scope.go:117] "RemoveContainer" containerID="35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.120718 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m825c" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.148071 4797 scope.go:117] "RemoveContainer" containerID="508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.164622 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content\") pod \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.164763 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities\") pod \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.164840 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssvmm\" (UniqueName: \"kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm\") pod \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\" (UID: \"4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d\") " Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.165844 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities" (OuterVolumeSpecName: "utilities") pod "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" (UID: "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.170161 4797 scope.go:117] "RemoveContainer" containerID="167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.170199 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm" (OuterVolumeSpecName: "kube-api-access-ssvmm") pod "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" (UID: "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d"). InnerVolumeSpecName "kube-api-access-ssvmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.219189 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" (UID: "4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.222872 4797 scope.go:117] "RemoveContainer" containerID="35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05" Jan 04 12:24:05 crc kubenswrapper[4797]: E0104 12:24:05.223359 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05\": container with ID starting with 35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05 not found: ID does not exist" containerID="35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.223389 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05"} err="failed to get container status \"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05\": rpc error: code = NotFound desc = could not find container \"35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05\": container with ID starting with 35e947abf9ba1028f1bd943578719c4b7ba5239d32ffd003df109578e9a0dc05 not found: ID does not exist" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.223408 4797 scope.go:117] "RemoveContainer" containerID="508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd" Jan 04 12:24:05 crc kubenswrapper[4797]: E0104 12:24:05.223646 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd\": container with ID starting with 508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd not found: ID does not exist" containerID="508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.223688 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd"} err="failed to get container status \"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd\": rpc error: code = NotFound desc = could not find container \"508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd\": container with ID starting with 508a49d1ba38eb64634a3606ee00408f45524b2ecaeb9b5f965d71ab88368dbd not found: ID does not exist" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.223714 4797 scope.go:117] "RemoveContainer" containerID="167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2" Jan 04 12:24:05 crc kubenswrapper[4797]: E0104 12:24:05.223917 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2\": container with ID starting with 167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2 not found: ID does not exist" containerID="167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.223941 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2"} err="failed to get container status \"167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2\": rpc error: code = NotFound desc = could not 
find container \"167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2\": container with ID starting with 167068d4b5aa98a39bf2f3363891c359c8b8b82d687ebe6f71a69f709488cbb2 not found: ID does not exist" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.266191 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssvmm\" (UniqueName: \"kubernetes.io/projected/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-kube-api-access-ssvmm\") on node \"crc\" DevicePath \"\"" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.266226 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.266236 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.460289 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.466393 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m825c"] Jan 04 12:24:05 crc kubenswrapper[4797]: I0104 12:24:05.485378 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" path="/var/lib/kubelet/pods/4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d/volumes" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.833739 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:19 crc kubenswrapper[4797]: E0104 12:25:19.835280 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="extract-content" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.835314 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="extract-content" Jan 04 12:25:19 crc kubenswrapper[4797]: E0104 12:25:19.835361 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="registry-server" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.835378 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="registry-server" Jan 04 12:25:19 crc kubenswrapper[4797]: E0104 12:25:19.835407 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="extract-utilities" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.835424 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="extract-utilities" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.835982 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a43ebba-4ff2-4588-9e58-2aa9a8e9c14d" containerName="registry-server" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.839480 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.860654 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.967893 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.968098 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrnkn\" (UniqueName: \"kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:19 crc kubenswrapper[4797]: I0104 12:25:19.968147 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.069417 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.069519 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrnkn\" (UniqueName: \"kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.069594 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.070148 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.070445 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.099549 4797 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nrnkn\" (UniqueName: \"kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn\") pod \"redhat-marketplace-jlr7x\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.188245 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.627099 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.989318 4797 generic.go:334] "Generic (PLEG): container finished" podID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerID="44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e" exitCode=0 Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.989387 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerDied","Data":"44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e"} Jan 04 12:25:20 crc kubenswrapper[4797]: I0104 12:25:20.989424 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerStarted","Data":"0d8f7c352c47ee7011703577df0374ad00489911b30c86d3eac284758f177abc"} Jan 04 12:25:23 crc kubenswrapper[4797]: I0104 12:25:23.011652 4797 generic.go:334] "Generic (PLEG): container finished" podID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerID="393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37" exitCode=0 Jan 04 12:25:23 crc kubenswrapper[4797]: I0104 12:25:23.011763 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerDied","Data":"393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37"} Jan 04 12:25:24 crc kubenswrapper[4797]: I0104 12:25:24.022342 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerStarted","Data":"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4"} Jan 04 12:25:24 crc kubenswrapper[4797]: I0104 12:25:24.046178 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jlr7x" podStartSLOduration=2.490298389 podStartE2EDuration="5.046154505s" podCreationTimestamp="2026-01-04 12:25:19 +0000 UTC" firstStartedPulling="2026-01-04 12:25:20.991641565 +0000 UTC m=+2219.848828314" lastFinishedPulling="2026-01-04 12:25:23.547497721 +0000 UTC m=+2222.404684430" observedRunningTime="2026-01-04 12:25:24.043586538 +0000 UTC m=+2222.900773247" watchObservedRunningTime="2026-01-04 12:25:24.046154505 +0000 UTC m=+2222.903341214" Jan 04 12:25:30 crc kubenswrapper[4797]: I0104 12:25:30.189467 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:30 crc kubenswrapper[4797]: I0104 12:25:30.190011 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:30 crc kubenswrapper[4797]: I0104 12:25:30.264150 4797 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:31 crc kubenswrapper[4797]: I0104 12:25:31.152854 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:32 crc kubenswrapper[4797]: I0104 12:25:32.512193 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:33 crc kubenswrapper[4797]: I0104 12:25:33.115195 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jlr7x" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="registry-server" containerID="cri-o://cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4" gracePeriod=2 Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.081496 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.127665 4797 generic.go:334] "Generic (PLEG): container finished" podID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerID="cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4" exitCode=0 Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.127734 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerDied","Data":"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4"} Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.127815 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlr7x" event={"ID":"7d69b12c-6e75-4281-ab7a-131381ba06a9","Type":"ContainerDied","Data":"0d8f7c352c47ee7011703577df0374ad00489911b30c86d3eac284758f177abc"} Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.127861 4797 scope.go:117] "RemoveContainer" containerID="cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.127868 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlr7x" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.164241 4797 scope.go:117] "RemoveContainer" containerID="393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.189775 4797 scope.go:117] "RemoveContainer" containerID="44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.200201 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities\") pod \"7d69b12c-6e75-4281-ab7a-131381ba06a9\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.200259 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content\") pod \"7d69b12c-6e75-4281-ab7a-131381ba06a9\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.200374 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrnkn\" (UniqueName: \"kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn\") pod \"7d69b12c-6e75-4281-ab7a-131381ba06a9\" (UID: \"7d69b12c-6e75-4281-ab7a-131381ba06a9\") " Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.201349 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities" (OuterVolumeSpecName: "utilities") pod "7d69b12c-6e75-4281-ab7a-131381ba06a9" (UID: "7d69b12c-6e75-4281-ab7a-131381ba06a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.209584 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn" (OuterVolumeSpecName: "kube-api-access-nrnkn") pod "7d69b12c-6e75-4281-ab7a-131381ba06a9" (UID: "7d69b12c-6e75-4281-ab7a-131381ba06a9"). InnerVolumeSpecName "kube-api-access-nrnkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.232230 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d69b12c-6e75-4281-ab7a-131381ba06a9" (UID: "7d69b12c-6e75-4281-ab7a-131381ba06a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.266422 4797 scope.go:117] "RemoveContainer" containerID="cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4" Jan 04 12:25:34 crc kubenswrapper[4797]: E0104 12:25:34.267339 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4\": container with ID starting with cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4 not found: ID does not exist" containerID="cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.267401 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4"} err="failed to get container status \"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4\": rpc error: code = NotFound desc = could not find container \"cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4\": container with ID starting with cfddc8e98b218e5c9ea8efaf76a048eeffe899a20085e8d3b93ed37f735635a4 not found: ID does not exist" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.267434 4797 scope.go:117] "RemoveContainer" containerID="393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37" Jan 04 12:25:34 crc kubenswrapper[4797]: E0104 12:25:34.268597 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37\": container with ID starting with 393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37 not found: ID does not exist" containerID="393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.268650 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37"} err="failed to get container status \"393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37\": rpc error: code = NotFound desc = could not find container \"393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37\": container with ID starting with 393e10b7a644476f83918f7ea1bcc2e677fce3b801066a97e4737cbf33939c37 not found: ID does not exist" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.268684 4797 scope.go:117] "RemoveContainer" containerID="44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e" Jan 04 12:25:34 crc kubenswrapper[4797]: E0104 12:25:34.269210 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e\": container with ID starting with 44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e not found: ID does not exist" containerID="44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.269254 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e"} err="failed to get container status \"44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e\": rpc error: code = NotFound desc = could not 
find container \"44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e\": container with ID starting with 44059e061502de58329e141db72509e3a005716d392568f0dd2ea33fce67867e not found: ID does not exist" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.301763 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.301809 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d69b12c-6e75-4281-ab7a-131381ba06a9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.301831 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrnkn\" (UniqueName: \"kubernetes.io/projected/7d69b12c-6e75-4281-ab7a-131381ba06a9-kube-api-access-nrnkn\") on node \"crc\" DevicePath \"\"" Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.487134 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:34 crc kubenswrapper[4797]: I0104 12:25:34.493233 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlr7x"] Jan 04 12:25:35 crc kubenswrapper[4797]: I0104 12:25:35.493242 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" path="/var/lib/kubelet/pods/7d69b12c-6e75-4281-ab7a-131381ba06a9/volumes" Jan 04 12:25:49 crc kubenswrapper[4797]: I0104 12:25:49.493357 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:25:49 crc kubenswrapper[4797]: I0104 12:25:49.494224 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:26:19 crc kubenswrapper[4797]: I0104 12:26:19.493014 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:26:19 crc kubenswrapper[4797]: I0104 12:26:19.493943 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.493723 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.494561 4797 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.494618 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.495323 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.495425 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" gracePeriod=600 Jan 04 12:26:49 crc kubenswrapper[4797]: E0104 12:26:49.626291 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.807406 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" exitCode=0 Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.807478 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3"} Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.807551 4797 scope.go:117] "RemoveContainer" containerID="34690dcc6dc9eb54d9f94d4eb212cbdcc6f8eb8989068f2be62bc32a6ad3fac7" Jan 04 12:26:49 crc kubenswrapper[4797]: I0104 12:26:49.808336 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:26:49 crc kubenswrapper[4797]: E0104 12:26:49.808742 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:04 crc kubenswrapper[4797]: I0104 12:27:04.475022 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:27:04 crc kubenswrapper[4797]: E0104 12:27:04.476142 4797 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:15 crc kubenswrapper[4797]: I0104 12:27:15.474647 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:27:15 crc kubenswrapper[4797]: E0104 12:27:15.475290 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:27 crc kubenswrapper[4797]: I0104 12:27:27.475238 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:27:27 crc kubenswrapper[4797]: E0104 12:27:27.476203 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:38 crc kubenswrapper[4797]: I0104 12:27:38.473818 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:27:38 crc kubenswrapper[4797]: E0104 12:27:38.474908 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.524622 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:40 crc kubenswrapper[4797]: E0104 12:27:40.525591 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="extract-utilities" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.525624 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="extract-utilities" Jan 04 12:27:40 crc kubenswrapper[4797]: E0104 12:27:40.525652 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="extract-content" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.525673 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="extract-content" Jan 04 12:27:40 crc kubenswrapper[4797]: E0104 12:27:40.525699 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" 
containerName="registry-server" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.525716 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="registry-server" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.526142 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d69b12c-6e75-4281-ab7a-131381ba06a9" containerName="registry-server" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.531620 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.547522 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.629531 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.629581 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.629613 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2gdl\" (UniqueName: \"kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.731825 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.731874 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.731913 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2gdl\" (UniqueName: \"kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.732490 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " 
pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.732551 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.760753 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2gdl\" (UniqueName: \"kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl\") pod \"community-operators-dc4qb\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:40 crc kubenswrapper[4797]: I0104 12:27:40.880319 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:41 crc kubenswrapper[4797]: I0104 12:27:41.401305 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:42 crc kubenswrapper[4797]: I0104 12:27:42.259669 4797 generic.go:334] "Generic (PLEG): container finished" podID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerID="66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3" exitCode=0 Jan 04 12:27:42 crc kubenswrapper[4797]: I0104 12:27:42.259755 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerDied","Data":"66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3"} Jan 04 12:27:42 crc kubenswrapper[4797]: I0104 12:27:42.261151 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerStarted","Data":"fbd28178c45fa2dac062757394893493a05f63ec73d5bd38d5cd3de0fa04ba74"} Jan 04 12:27:43 crc kubenswrapper[4797]: I0104 12:27:43.270806 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerStarted","Data":"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c"} Jan 04 12:27:44 crc kubenswrapper[4797]: I0104 12:27:44.284203 4797 generic.go:334] "Generic (PLEG): container finished" podID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerID="198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c" exitCode=0 Jan 04 12:27:44 crc kubenswrapper[4797]: I0104 12:27:44.284307 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerDied","Data":"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c"} Jan 04 12:27:45 crc kubenswrapper[4797]: I0104 12:27:45.297116 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerStarted","Data":"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6"} Jan 04 12:27:45 crc kubenswrapper[4797]: I0104 12:27:45.324215 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dc4qb" 
podStartSLOduration=2.8952402360000002 podStartE2EDuration="5.324199243s" podCreationTimestamp="2026-01-04 12:27:40 +0000 UTC" firstStartedPulling="2026-01-04 12:27:42.263094773 +0000 UTC m=+2361.120281522" lastFinishedPulling="2026-01-04 12:27:44.69205381 +0000 UTC m=+2363.549240529" observedRunningTime="2026-01-04 12:27:45.32026918 +0000 UTC m=+2364.177455899" watchObservedRunningTime="2026-01-04 12:27:45.324199243 +0000 UTC m=+2364.181385952" Jan 04 12:27:50 crc kubenswrapper[4797]: I0104 12:27:50.474431 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:27:50 crc kubenswrapper[4797]: E0104 12:27:50.475256 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:27:50 crc kubenswrapper[4797]: I0104 12:27:50.880607 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:50 crc kubenswrapper[4797]: I0104 12:27:50.880687 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:50 crc kubenswrapper[4797]: I0104 12:27:50.956732 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:51 crc kubenswrapper[4797]: I0104 12:27:51.404288 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:51 crc kubenswrapper[4797]: I0104 12:27:51.451169 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:53 crc kubenswrapper[4797]: I0104 12:27:53.369380 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dc4qb" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="registry-server" containerID="cri-o://2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6" gracePeriod=2 Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.342671 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.383829 4797 generic.go:334] "Generic (PLEG): container finished" podID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerID="2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6" exitCode=0 Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.383909 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerDied","Data":"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6"} Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.383962 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dc4qb" event={"ID":"c659a36c-2020-42b4-ab12-aa4051bf7487","Type":"ContainerDied","Data":"fbd28178c45fa2dac062757394893493a05f63ec73d5bd38d5cd3de0fa04ba74"} Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.384040 4797 scope.go:117] "RemoveContainer" containerID="2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.384323 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dc4qb" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.406380 4797 scope.go:117] "RemoveContainer" containerID="198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.424299 4797 scope.go:117] "RemoveContainer" containerID="66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.446099 4797 scope.go:117] "RemoveContainer" containerID="2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6" Jan 04 12:27:54 crc kubenswrapper[4797]: E0104 12:27:54.446623 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6\": container with ID starting with 2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6 not found: ID does not exist" containerID="2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.446675 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6"} err="failed to get container status \"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6\": rpc error: code = NotFound desc = could not find container \"2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6\": container with ID starting with 2ebd4b3d692135b8a5b2a0046d415ec8d34f0baaace5e88aee8cd064a6b5c2c6 not found: ID does not exist" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.446710 4797 scope.go:117] "RemoveContainer" containerID="198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c" Jan 04 12:27:54 crc kubenswrapper[4797]: E0104 12:27:54.447241 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c\": container with ID starting with 198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c not found: ID does not exist" 
containerID="198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.447280 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c"} err="failed to get container status \"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c\": rpc error: code = NotFound desc = could not find container \"198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c\": container with ID starting with 198e1bb883e89381f42d0128d38616923fd7cbdbb1b5155a15528afb5d3e550c not found: ID does not exist" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.447306 4797 scope.go:117] "RemoveContainer" containerID="66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3" Jan 04 12:27:54 crc kubenswrapper[4797]: E0104 12:27:54.447548 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3\": container with ID starting with 66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3 not found: ID does not exist" containerID="66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.447564 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3"} err="failed to get container status \"66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3\": rpc error: code = NotFound desc = could not find container \"66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3\": container with ID starting with 66d10c966c6ad09cf0450baf4ba054177f4961951af133f492ff48c0ab2fb2d3 not found: ID does not exist" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.448336 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities\") pod \"c659a36c-2020-42b4-ab12-aa4051bf7487\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.448394 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content\") pod \"c659a36c-2020-42b4-ab12-aa4051bf7487\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.448421 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2gdl\" (UniqueName: \"kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl\") pod \"c659a36c-2020-42b4-ab12-aa4051bf7487\" (UID: \"c659a36c-2020-42b4-ab12-aa4051bf7487\") " Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.449313 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities" (OuterVolumeSpecName: "utilities") pod "c659a36c-2020-42b4-ab12-aa4051bf7487" (UID: "c659a36c-2020-42b4-ab12-aa4051bf7487"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.458481 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl" (OuterVolumeSpecName: "kube-api-access-w2gdl") pod "c659a36c-2020-42b4-ab12-aa4051bf7487" (UID: "c659a36c-2020-42b4-ab12-aa4051bf7487"). InnerVolumeSpecName "kube-api-access-w2gdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.503278 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c659a36c-2020-42b4-ab12-aa4051bf7487" (UID: "c659a36c-2020-42b4-ab12-aa4051bf7487"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.549681 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2gdl\" (UniqueName: \"kubernetes.io/projected/c659a36c-2020-42b4-ab12-aa4051bf7487-kube-api-access-w2gdl\") on node \"crc\" DevicePath \"\"" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.549723 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.549740 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c659a36c-2020-42b4-ab12-aa4051bf7487-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.730629 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:54 crc kubenswrapper[4797]: I0104 12:27:54.740960 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dc4qb"] Jan 04 12:27:55 crc kubenswrapper[4797]: I0104 12:27:55.488163 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" path="/var/lib/kubelet/pods/c659a36c-2020-42b4-ab12-aa4051bf7487/volumes" Jan 04 12:28:03 crc kubenswrapper[4797]: I0104 12:28:03.474126 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:28:03 crc kubenswrapper[4797]: E0104 12:28:03.475119 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:28:15 crc kubenswrapper[4797]: I0104 12:28:15.474500 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:28:15 crc kubenswrapper[4797]: E0104 12:28:15.475445 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:28:28 crc kubenswrapper[4797]: I0104 12:28:28.474098 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:28:28 crc kubenswrapper[4797]: E0104 12:28:28.475378 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:28:39 crc kubenswrapper[4797]: I0104 12:28:39.475473 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:28:39 crc kubenswrapper[4797]: E0104 12:28:39.476594 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:28:54 crc kubenswrapper[4797]: I0104 12:28:54.474603 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:28:54 crc kubenswrapper[4797]: E0104 12:28:54.475636 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:29:05 crc kubenswrapper[4797]: I0104 12:29:05.474922 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:29:05 crc kubenswrapper[4797]: E0104 12:29:05.475981 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:29:16 crc kubenswrapper[4797]: I0104 12:29:16.475626 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:29:16 crc kubenswrapper[4797]: E0104 12:29:16.476897 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" 
podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:29:29 crc kubenswrapper[4797]: I0104 12:29:29.480417 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:29:29 crc kubenswrapper[4797]: E0104 12:29:29.481557 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:29:41 crc kubenswrapper[4797]: I0104 12:29:41.481968 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:29:41 crc kubenswrapper[4797]: E0104 12:29:41.482692 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:29:54 crc kubenswrapper[4797]: I0104 12:29:54.474256 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:29:54 crc kubenswrapper[4797]: E0104 12:29:54.475510 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.167640 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2"] Jan 04 12:30:00 crc kubenswrapper[4797]: E0104 12:30:00.169434 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="registry-server" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.169463 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="registry-server" Jan 04 12:30:00 crc kubenswrapper[4797]: E0104 12:30:00.169558 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="extract-utilities" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.169578 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="extract-utilities" Jan 04 12:30:00 crc kubenswrapper[4797]: E0104 12:30:00.169644 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="extract-content" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.169658 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="extract-content" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.173244 4797 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="c659a36c-2020-42b4-ab12-aa4051bf7487" containerName="registry-server" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.174233 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.176366 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.177125 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.178894 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2"] Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.257951 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.258012 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.258055 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s49qr\" (UniqueName: \"kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.359445 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.359487 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.359518 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s49qr\" (UniqueName: \"kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc 
kubenswrapper[4797]: I0104 12:30:00.360332 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.366047 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.385339 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s49qr\" (UniqueName: \"kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr\") pod \"collect-profiles-29458830-wgzl2\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.503870 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:00 crc kubenswrapper[4797]: I0104 12:30:00.978357 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2"] Jan 04 12:30:01 crc kubenswrapper[4797]: I0104 12:30:01.489959 4797 generic.go:334] "Generic (PLEG): container finished" podID="e2e1aff3-4b88-499a-a052-69e43f1bbab4" containerID="137c16b6b7232b63a3e0dcbc6eb96c96e87221ae0b7cba004b23097b2d51bcf3" exitCode=0 Jan 04 12:30:01 crc kubenswrapper[4797]: I0104 12:30:01.490287 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" event={"ID":"e2e1aff3-4b88-499a-a052-69e43f1bbab4","Type":"ContainerDied","Data":"137c16b6b7232b63a3e0dcbc6eb96c96e87221ae0b7cba004b23097b2d51bcf3"} Jan 04 12:30:01 crc kubenswrapper[4797]: I0104 12:30:01.490313 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" event={"ID":"e2e1aff3-4b88-499a-a052-69e43f1bbab4","Type":"ContainerStarted","Data":"e2cb6431bfd2001d38fec698ba74bde1c9f7a83adc63116cb684e190ece9271e"} Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.814779 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.900393 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s49qr\" (UniqueName: \"kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr\") pod \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.900459 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume\") pod \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.900497 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume\") pod \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\" (UID: \"e2e1aff3-4b88-499a-a052-69e43f1bbab4\") " Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.901411 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume" (OuterVolumeSpecName: "config-volume") pod "e2e1aff3-4b88-499a-a052-69e43f1bbab4" (UID: "e2e1aff3-4b88-499a-a052-69e43f1bbab4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.911092 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e2e1aff3-4b88-499a-a052-69e43f1bbab4" (UID: "e2e1aff3-4b88-499a-a052-69e43f1bbab4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:30:02 crc kubenswrapper[4797]: I0104 12:30:02.924197 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr" (OuterVolumeSpecName: "kube-api-access-s49qr") pod "e2e1aff3-4b88-499a-a052-69e43f1bbab4" (UID: "e2e1aff3-4b88-499a-a052-69e43f1bbab4"). InnerVolumeSpecName "kube-api-access-s49qr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.001494 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s49qr\" (UniqueName: \"kubernetes.io/projected/e2e1aff3-4b88-499a-a052-69e43f1bbab4-kube-api-access-s49qr\") on node \"crc\" DevicePath \"\"" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.001523 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e2e1aff3-4b88-499a-a052-69e43f1bbab4-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.001533 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e2e1aff3-4b88-499a-a052-69e43f1bbab4-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.510647 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" event={"ID":"e2e1aff3-4b88-499a-a052-69e43f1bbab4","Type":"ContainerDied","Data":"e2cb6431bfd2001d38fec698ba74bde1c9f7a83adc63116cb684e190ece9271e"} Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.510698 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2cb6431bfd2001d38fec698ba74bde1c9f7a83adc63116cb684e190ece9271e" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.510768 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-wgzl2" Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.903572 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"] Jan 04 12:30:03 crc kubenswrapper[4797]: I0104 12:30:03.908834 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-rwnbb"] Jan 04 12:30:05 crc kubenswrapper[4797]: I0104 12:30:05.504526 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5993e116-1e4f-47ba-a301-47a026bdbf14" path="/var/lib/kubelet/pods/5993e116-1e4f-47ba-a301-47a026bdbf14/volumes" Jan 04 12:30:08 crc kubenswrapper[4797]: I0104 12:30:08.474490 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:30:08 crc kubenswrapper[4797]: E0104 12:30:08.475458 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:30:21 crc kubenswrapper[4797]: I0104 12:30:21.484802 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:30:21 crc kubenswrapper[4797]: E0104 12:30:21.493414 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:30:26 crc kubenswrapper[4797]: I0104 12:30:26.430898 4797 scope.go:117] "RemoveContainer" containerID="03e323b756b695f27416c0f70eb92c3ef368ecc0d2d856fdc64990a569e754be" Jan 04 12:30:33 crc kubenswrapper[4797]: I0104 12:30:33.474305 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:30:33 crc kubenswrapper[4797]: E0104 12:30:33.475232 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:30:45 crc kubenswrapper[4797]: I0104 12:30:45.475374 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:30:45 crc kubenswrapper[4797]: E0104 12:30:45.476418 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:30:57 crc kubenswrapper[4797]: I0104 12:30:57.474426 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:30:57 crc kubenswrapper[4797]: E0104 12:30:57.478832 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:31:11 crc kubenswrapper[4797]: I0104 12:31:11.482218 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:31:11 crc kubenswrapper[4797]: E0104 12:31:11.484948 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:31:24 crc kubenswrapper[4797]: I0104 12:31:24.473902 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:31:24 crc kubenswrapper[4797]: E0104 12:31:24.474700 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:31:36 crc kubenswrapper[4797]: I0104 12:31:36.473964 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:31:36 crc kubenswrapper[4797]: E0104 12:31:36.475279 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:31:51 crc kubenswrapper[4797]: I0104 12:31:51.493031 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:31:52 crc kubenswrapper[4797]: I0104 12:31:52.566880 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b"} Jan 04 12:34:19 crc kubenswrapper[4797]: I0104 12:34:19.492902 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:34:19 crc kubenswrapper[4797]: I0104 12:34:19.493354 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:34:49 crc kubenswrapper[4797]: I0104 12:34:49.493540 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:34:49 crc kubenswrapper[4797]: I0104 12:34:49.494252 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.385855 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:34:55 crc kubenswrapper[4797]: E0104 12:34:55.387174 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e1aff3-4b88-499a-a052-69e43f1bbab4" containerName="collect-profiles" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.387201 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e1aff3-4b88-499a-a052-69e43f1bbab4" containerName="collect-profiles" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.389139 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e1aff3-4b88-499a-a052-69e43f1bbab4" 
containerName="collect-profiles" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.399957 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.435546 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.553451 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8f8x\" (UniqueName: \"kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.553601 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.553683 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.655376 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8f8x\" (UniqueName: \"kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.655468 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.655504 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.656844 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.656919 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " 
pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.689424 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8f8x\" (UniqueName: \"kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x\") pod \"certified-operators-qf8jz\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:55 crc kubenswrapper[4797]: I0104 12:34:55.769070 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:34:56 crc kubenswrapper[4797]: I0104 12:34:56.243061 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:34:56 crc kubenswrapper[4797]: I0104 12:34:56.405968 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerStarted","Data":"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c"} Jan 04 12:34:56 crc kubenswrapper[4797]: I0104 12:34:56.406024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerStarted","Data":"0cac9909337411dc770ea95b1c64b7c024598000543e3d55a1a1bc6cd2c659fd"} Jan 04 12:34:56 crc kubenswrapper[4797]: E0104 12:34:56.495552 4797 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda10d82a4_83ae_4f36_a9f6_5d091953763a.slice/crio-0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c.scope\": RecentStats: unable to find data in memory cache]" Jan 04 12:34:57 crc kubenswrapper[4797]: I0104 12:34:57.416952 4797 generic.go:334] "Generic (PLEG): container finished" podID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerID="0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c" exitCode=0 Jan 04 12:34:57 crc kubenswrapper[4797]: I0104 12:34:57.417022 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerDied","Data":"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c"} Jan 04 12:34:57 crc kubenswrapper[4797]: I0104 12:34:57.419864 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:34:58 crc kubenswrapper[4797]: I0104 12:34:58.429788 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerStarted","Data":"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050"} Jan 04 12:34:59 crc kubenswrapper[4797]: I0104 12:34:59.452840 4797 generic.go:334] "Generic (PLEG): container finished" podID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerID="e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050" exitCode=0 Jan 04 12:34:59 crc kubenswrapper[4797]: I0104 12:34:59.452895 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerDied","Data":"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050"} Jan 04 12:35:00 crc 
kubenswrapper[4797]: I0104 12:35:00.466605 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerStarted","Data":"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796"} Jan 04 12:35:00 crc kubenswrapper[4797]: I0104 12:35:00.497409 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qf8jz" podStartSLOduration=3.028757287 podStartE2EDuration="5.497384675s" podCreationTimestamp="2026-01-04 12:34:55 +0000 UTC" firstStartedPulling="2026-01-04 12:34:57.419662867 +0000 UTC m=+2796.276849576" lastFinishedPulling="2026-01-04 12:34:59.888290245 +0000 UTC m=+2798.745476964" observedRunningTime="2026-01-04 12:35:00.493323159 +0000 UTC m=+2799.350509908" watchObservedRunningTime="2026-01-04 12:35:00.497384675 +0000 UTC m=+2799.354571414" Jan 04 12:35:05 crc kubenswrapper[4797]: I0104 12:35:05.769692 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:05 crc kubenswrapper[4797]: I0104 12:35:05.769776 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:05 crc kubenswrapper[4797]: I0104 12:35:05.845835 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:06 crc kubenswrapper[4797]: I0104 12:35:06.582169 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:06 crc kubenswrapper[4797]: I0104 12:35:06.653806 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:35:08 crc kubenswrapper[4797]: I0104 12:35:08.538323 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qf8jz" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="registry-server" containerID="cri-o://99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796" gracePeriod=2 Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.001902 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.079477 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content\") pod \"a10d82a4-83ae-4f36-a9f6-5d091953763a\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.079634 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities\") pod \"a10d82a4-83ae-4f36-a9f6-5d091953763a\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.079694 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8f8x\" (UniqueName: \"kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x\") pod \"a10d82a4-83ae-4f36-a9f6-5d091953763a\" (UID: \"a10d82a4-83ae-4f36-a9f6-5d091953763a\") " Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.082086 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities" (OuterVolumeSpecName: "utilities") pod "a10d82a4-83ae-4f36-a9f6-5d091953763a" (UID: "a10d82a4-83ae-4f36-a9f6-5d091953763a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.086287 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x" (OuterVolumeSpecName: "kube-api-access-p8f8x") pod "a10d82a4-83ae-4f36-a9f6-5d091953763a" (UID: "a10d82a4-83ae-4f36-a9f6-5d091953763a"). InnerVolumeSpecName "kube-api-access-p8f8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.131487 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a10d82a4-83ae-4f36-a9f6-5d091953763a" (UID: "a10d82a4-83ae-4f36-a9f6-5d091953763a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.181606 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.181657 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a10d82a4-83ae-4f36-a9f6-5d091953763a-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.181680 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8f8x\" (UniqueName: \"kubernetes.io/projected/a10d82a4-83ae-4f36-a9f6-5d091953763a-kube-api-access-p8f8x\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.554340 4797 generic.go:334] "Generic (PLEG): container finished" podID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerID="99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796" exitCode=0 Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.554406 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerDied","Data":"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796"} Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.554440 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qf8jz" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.554465 4797 scope.go:117] "RemoveContainer" containerID="99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.554449 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qf8jz" event={"ID":"a10d82a4-83ae-4f36-a9f6-5d091953763a","Type":"ContainerDied","Data":"0cac9909337411dc770ea95b1c64b7c024598000543e3d55a1a1bc6cd2c659fd"} Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.597508 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.598222 4797 scope.go:117] "RemoveContainer" containerID="e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.604662 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qf8jz"] Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.628937 4797 scope.go:117] "RemoveContainer" containerID="0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.652563 4797 scope.go:117] "RemoveContainer" containerID="99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796" Jan 04 12:35:09 crc kubenswrapper[4797]: E0104 12:35:09.653365 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796\": container with ID starting with 99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796 not found: ID does not exist" containerID="99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.653422 
4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796"} err="failed to get container status \"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796\": rpc error: code = NotFound desc = could not find container \"99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796\": container with ID starting with 99ef243052cc5aa0c5dadb115f1eab2092986a9ea5c2bf539cdc588d6eb51796 not found: ID does not exist" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.653454 4797 scope.go:117] "RemoveContainer" containerID="e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050" Jan 04 12:35:09 crc kubenswrapper[4797]: E0104 12:35:09.653867 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050\": container with ID starting with e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050 not found: ID does not exist" containerID="e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.653904 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050"} err="failed to get container status \"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050\": rpc error: code = NotFound desc = could not find container \"e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050\": container with ID starting with e76e291574f7ea65d1949d7d2e47f57034165b2513361783002f624efb3e6050 not found: ID does not exist" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.653928 4797 scope.go:117] "RemoveContainer" containerID="0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c" Jan 04 12:35:09 crc kubenswrapper[4797]: E0104 12:35:09.654169 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c\": container with ID starting with 0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c not found: ID does not exist" containerID="0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c" Jan 04 12:35:09 crc kubenswrapper[4797]: I0104 12:35:09.654202 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c"} err="failed to get container status \"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c\": rpc error: code = NotFound desc = could not find container \"0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c\": container with ID starting with 0f03bb3acd680849f88b90e1b72df2b4e86c2d7a440c44adac0aa66f1e178b9c not found: ID does not exist" Jan 04 12:35:11 crc kubenswrapper[4797]: I0104 12:35:11.491367 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" path="/var/lib/kubelet/pods/a10d82a4-83ae-4f36-a9f6-5d091953763a/volumes" Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.493426 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.494266 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.494332 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.495138 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.495229 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b" gracePeriod=600 Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.652762 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b" exitCode=0 Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.652815 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b"} Jan 04 12:35:19 crc kubenswrapper[4797]: I0104 12:35:19.652905 4797 scope.go:117] "RemoveContainer" containerID="01c8e8e6617d072b3feba770835495db12b9e241a89a13aa08cb1f099de496d3" Jan 04 12:35:20 crc kubenswrapper[4797]: I0104 12:35:20.663947 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"} Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.755706 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:35:50 crc kubenswrapper[4797]: E0104 12:35:50.756509 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="extract-content" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.756522 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="extract-content" Jan 04 12:35:50 crc kubenswrapper[4797]: E0104 12:35:50.756547 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="extract-utilities" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.756553 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" 
containerName="extract-utilities" Jan 04 12:35:50 crc kubenswrapper[4797]: E0104 12:35:50.756564 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="registry-server" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.756571 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="registry-server" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.756705 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="a10d82a4-83ae-4f36-a9f6-5d091953763a" containerName="registry-server" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.757608 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.768370 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.910763 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.910843 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:50 crc kubenswrapper[4797]: I0104 12:35:50.911158 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmvxs\" (UniqueName: \"kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.012196 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmvxs\" (UniqueName: \"kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.012356 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.012393 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.013077 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.013141 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.051456 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmvxs\" (UniqueName: \"kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs\") pod \"redhat-marketplace-mfcct\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.121749 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.574742 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.952914 4797 generic.go:334] "Generic (PLEG): container finished" podID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerID="7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484" exitCode=0 Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.953017 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerDied","Data":"7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484"} Jan 04 12:35:51 crc kubenswrapper[4797]: I0104 12:35:51.953265 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerStarted","Data":"e49e14cd9631e85bd43d46fca2c1a71ea5f2fabdd41aa49361fd2906947e4b5b"} Jan 04 12:35:52 crc kubenswrapper[4797]: I0104 12:35:52.970751 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerStarted","Data":"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8"} Jan 04 12:35:53 crc kubenswrapper[4797]: I0104 12:35:53.986363 4797 generic.go:334] "Generic (PLEG): container finished" podID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerID="fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8" exitCode=0 Jan 04 12:35:53 crc kubenswrapper[4797]: I0104 12:35:53.986530 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerDied","Data":"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8"} Jan 04 12:35:55 crc kubenswrapper[4797]: I0104 12:35:55.002346 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerStarted","Data":"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6"} Jan 04 12:35:55 crc 
kubenswrapper[4797]: I0104 12:35:55.035539 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mfcct" podStartSLOduration=2.478803362 podStartE2EDuration="5.035520461s" podCreationTimestamp="2026-01-04 12:35:50 +0000 UTC" firstStartedPulling="2026-01-04 12:35:51.954798733 +0000 UTC m=+2850.811985452" lastFinishedPulling="2026-01-04 12:35:54.511515802 +0000 UTC m=+2853.368702551" observedRunningTime="2026-01-04 12:35:55.034891765 +0000 UTC m=+2853.892078544" watchObservedRunningTime="2026-01-04 12:35:55.035520461 +0000 UTC m=+2853.892707170" Jan 04 12:36:01 crc kubenswrapper[4797]: I0104 12:36:01.122438 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:01 crc kubenswrapper[4797]: I0104 12:36:01.123069 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:01 crc kubenswrapper[4797]: I0104 12:36:01.202472 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:02 crc kubenswrapper[4797]: I0104 12:36:02.154297 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:02 crc kubenswrapper[4797]: I0104 12:36:02.214341 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:36:04 crc kubenswrapper[4797]: I0104 12:36:04.084238 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mfcct" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="registry-server" containerID="cri-o://9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6" gracePeriod=2 Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.027409 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.093448 4797 generic.go:334] "Generic (PLEG): container finished" podID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerID="9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6" exitCode=0 Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.093502 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerDied","Data":"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6"} Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.093532 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfcct" event={"ID":"bb428c2e-7b34-4a62-8ed2-2f1637d1d497","Type":"ContainerDied","Data":"e49e14cd9631e85bd43d46fca2c1a71ea5f2fabdd41aa49361fd2906947e4b5b"} Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.093552 4797 scope.go:117] "RemoveContainer" containerID="9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.093686 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfcct" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.121776 4797 scope.go:117] "RemoveContainer" containerID="fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.146208 4797 scope.go:117] "RemoveContainer" containerID="7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.147724 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities\") pod \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.147838 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmvxs\" (UniqueName: \"kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs\") pod \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.147953 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content\") pod \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\" (UID: \"bb428c2e-7b34-4a62-8ed2-2f1637d1d497\") " Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.149433 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities" (OuterVolumeSpecName: "utilities") pod "bb428c2e-7b34-4a62-8ed2-2f1637d1d497" (UID: "bb428c2e-7b34-4a62-8ed2-2f1637d1d497"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.153776 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs" (OuterVolumeSpecName: "kube-api-access-lmvxs") pod "bb428c2e-7b34-4a62-8ed2-2f1637d1d497" (UID: "bb428c2e-7b34-4a62-8ed2-2f1637d1d497"). InnerVolumeSpecName "kube-api-access-lmvxs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.172576 4797 scope.go:117] "RemoveContainer" containerID="9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6" Jan 04 12:36:05 crc kubenswrapper[4797]: E0104 12:36:05.173024 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6\": container with ID starting with 9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6 not found: ID does not exist" containerID="9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.173098 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6"} err="failed to get container status \"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6\": rpc error: code = NotFound desc = could not find container \"9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6\": container with ID starting with 9b32a3ab98cce6dd01fd9e8388552910e86cf18c49baef2c3f8a1c21cce00af6 not found: ID does not exist" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.173119 4797 scope.go:117] "RemoveContainer" containerID="fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8" Jan 04 12:36:05 crc kubenswrapper[4797]: E0104 12:36:05.173385 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8\": container with ID starting with fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8 not found: ID does not exist" containerID="fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.173443 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8"} err="failed to get container status \"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8\": rpc error: code = NotFound desc = could not find container \"fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8\": container with ID starting with fac13629253a64bfabb51a161935a8002f9c056fa877a1c3753fdc6840e5f9f8 not found: ID does not exist" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.173461 4797 scope.go:117] "RemoveContainer" containerID="7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484" Jan 04 12:36:05 crc kubenswrapper[4797]: E0104 12:36:05.173716 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484\": container with ID starting with 7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484 not found: ID does not exist" containerID="7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.173774 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484"} err="failed to get container status \"7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484\": rpc error: code = NotFound desc = could not 
find container \"7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484\": container with ID starting with 7beb2a5c726024317d7b7837d84bc081d81ff40398c52e65fca1d45fec107484 not found: ID does not exist" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.176501 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb428c2e-7b34-4a62-8ed2-2f1637d1d497" (UID: "bb428c2e-7b34-4a62-8ed2-2f1637d1d497"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.249467 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.249506 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmvxs\" (UniqueName: \"kubernetes.io/projected/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-kube-api-access-lmvxs\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.249522 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb428c2e-7b34-4a62-8ed2-2f1637d1d497-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.432708 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.437429 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfcct"] Jan 04 12:36:05 crc kubenswrapper[4797]: I0104 12:36:05.485626 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" path="/var/lib/kubelet/pods/bb428c2e-7b34-4a62-8ed2-2f1637d1d497/volumes" Jan 04 12:37:19 crc kubenswrapper[4797]: I0104 12:37:19.493296 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:37:19 crc kubenswrapper[4797]: I0104 12:37:19.493866 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:37:49 crc kubenswrapper[4797]: I0104 12:37:49.493538 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:37:49 crc kubenswrapper[4797]: I0104 12:37:49.494176 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Jan 04 12:38:19 crc kubenswrapper[4797]: I0104 12:38:19.492835 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:38:19 crc kubenswrapper[4797]: I0104 12:38:19.493954 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:38:19 crc kubenswrapper[4797]: I0104 12:38:19.494180 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:38:19 crc kubenswrapper[4797]: I0104 12:38:19.495839 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:38:19 crc kubenswrapper[4797]: I0104 12:38:19.495939 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" gracePeriod=600 Jan 04 12:38:19 crc kubenswrapper[4797]: E0104 12:38:19.627247 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:38:20 crc kubenswrapper[4797]: I0104 12:38:20.299171 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" exitCode=0 Jan 04 12:38:20 crc kubenswrapper[4797]: I0104 12:38:20.299216 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"} Jan 04 12:38:20 crc kubenswrapper[4797]: I0104 12:38:20.299301 4797 scope.go:117] "RemoveContainer" containerID="dbd334cf0c36ec14b4d7e9cfdcae0ce10697970b9126dee68b7e8c3ef6ff421b" Jan 04 12:38:20 crc kubenswrapper[4797]: I0104 12:38:20.300029 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:38:20 crc kubenswrapper[4797]: E0104 12:38:20.300419 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.321048 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:24 crc kubenswrapper[4797]: E0104 12:38:24.322206 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="registry-server" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.322230 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="registry-server" Jan 04 12:38:24 crc kubenswrapper[4797]: E0104 12:38:24.322277 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="extract-utilities" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.322291 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="extract-utilities" Jan 04 12:38:24 crc kubenswrapper[4797]: E0104 12:38:24.322324 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="extract-content" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.322338 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="extract-content" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.322588 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb428c2e-7b34-4a62-8ed2-2f1637d1d497" containerName="registry-server" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.324513 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-25dx9"
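The "back-off 5m0s" messages around this point show kubelet refusing to restart the crash-looping machine-config-daemon container until its restart back-off has elapsed; the delay grows with each failed restart until it hits the cap named in the message. A small sketch of that doubling schedule follows, assuming kubelet's commonly documented defaults (10s initial delay, factor of 2, 5m cap); the constants are an assumption for illustration, not values read from this node.

// Assumed defaults (initial 10s, doubling, 5m cap); prints the wait
// imposed before each successive restart of a crash-looping container.
package main

import (
	"fmt"
	"time"
)

func main() {
	const maxDelay = 5 * time.Minute // the "back-off 5m0s" cap in the records
	delay := 10 * time.Second        // assumed initial delay
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("restart %d: wait %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // from here on, every retry waits the full 5m
		}
	}
}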
Need to start a new one" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.358478 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.443508 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx4j6\" (UniqueName: \"kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.443591 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.443879 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.545804 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx4j6\" (UniqueName: \"kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.547448 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.548623 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.549236 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.549651 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.569269 4797 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nx4j6\" (UniqueName: \"kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6\") pod \"community-operators-25dx9\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:24 crc kubenswrapper[4797]: I0104 12:38:24.651686 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:25 crc kubenswrapper[4797]: I0104 12:38:25.131042 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:25 crc kubenswrapper[4797]: I0104 12:38:25.348708 4797 generic.go:334] "Generic (PLEG): container finished" podID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerID="b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4" exitCode=0 Jan 04 12:38:25 crc kubenswrapper[4797]: I0104 12:38:25.348785 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerDied","Data":"b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4"} Jan 04 12:38:25 crc kubenswrapper[4797]: I0104 12:38:25.348947 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerStarted","Data":"26738d82bb989b89afa9d2dfd22fdd0be2ea09ea1385899a1351374e271583b7"} Jan 04 12:38:27 crc kubenswrapper[4797]: I0104 12:38:27.369770 4797 generic.go:334] "Generic (PLEG): container finished" podID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerID="239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e" exitCode=0 Jan 04 12:38:27 crc kubenswrapper[4797]: I0104 12:38:27.369870 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerDied","Data":"239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e"} Jan 04 12:38:28 crc kubenswrapper[4797]: I0104 12:38:28.384360 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerStarted","Data":"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56"} Jan 04 12:38:28 crc kubenswrapper[4797]: I0104 12:38:28.413377 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-25dx9" podStartSLOduration=1.967021499 podStartE2EDuration="4.413348422s" podCreationTimestamp="2026-01-04 12:38:24 +0000 UTC" firstStartedPulling="2026-01-04 12:38:25.35073018 +0000 UTC m=+3004.207916929" lastFinishedPulling="2026-01-04 12:38:27.797057143 +0000 UTC m=+3006.654243852" observedRunningTime="2026-01-04 12:38:28.407073587 +0000 UTC m=+3007.264260326" watchObservedRunningTime="2026-01-04 12:38:28.413348422 +0000 UTC m=+3007.270535171" Jan 04 12:38:32 crc kubenswrapper[4797]: I0104 12:38:32.474574 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:38:32 crc kubenswrapper[4797]: E0104 12:38:32.475009 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Jan 04 12:38:32 crc kubenswrapper[4797]: I0104 12:38:32.474574 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:38:32 crc kubenswrapper[4797]: E0104 12:38:32.475009 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:38:34 crc kubenswrapper[4797]: I0104 12:38:34.652565 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:34 crc kubenswrapper[4797]: I0104 12:38:34.654150 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:34 crc kubenswrapper[4797]: I0104 12:38:34.710834 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:35 crc kubenswrapper[4797]: I0104 12:38:35.596298 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:35 crc kubenswrapper[4797]: I0104 12:38:35.660056 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:37 crc kubenswrapper[4797]: I0104 12:38:37.574945 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-25dx9" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="registry-server" containerID="cri-o://e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56" gracePeriod=2 Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.545180 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.581481 4797 generic.go:334] "Generic (PLEG): container finished" podID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerID="e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56" exitCode=0 Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.581552 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerDied","Data":"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56"} Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.581582 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-25dx9" event={"ID":"98f33ed2-7e60-4ad4-804b-0102390369f3","Type":"ContainerDied","Data":"26738d82bb989b89afa9d2dfd22fdd0be2ea09ea1385899a1351374e271583b7"} Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.581594 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-25dx9" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.581601 4797 scope.go:117] "RemoveContainer" containerID="e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.603766 4797 scope.go:117] "RemoveContainer" containerID="239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.629321 4797 scope.go:117] "RemoveContainer" containerID="b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.660759 4797 scope.go:117] "RemoveContainer" containerID="e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56" Jan 04 12:38:38 crc kubenswrapper[4797]: E0104 12:38:38.661215 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56\": container with ID starting with e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56 not found: ID does not exist" containerID="e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.661301 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56"} err="failed to get container status \"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56\": rpc error: code = NotFound desc = could not find container \"e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56\": container with ID starting with e658bf9c3319879506175ee0b68325bd63c24963749a326a3a2cf6363233ec56 not found: ID does not exist" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.661590 4797 scope.go:117] "RemoveContainer" containerID="239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e" Jan 04 12:38:38 crc kubenswrapper[4797]: E0104 12:38:38.662183 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e\": container with ID starting with 239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e not found: ID does not exist" containerID="239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.662226 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e"} err="failed to get container status \"239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e\": rpc error: code = NotFound desc = could not find container \"239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e\": container with ID starting with 239ee265cbbe465e25c10ffa068cc3b52d79148f8021090fb0b22ccb81b1789e not found: ID does not exist" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.662250 4797 scope.go:117] "RemoveContainer" containerID="b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4" Jan 04 12:38:38 crc kubenswrapper[4797]: E0104 12:38:38.662544 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4\": container with ID starting 
with b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4 not found: ID does not exist" containerID="b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.662595 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4"} err="failed to get container status \"b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4\": rpc error: code = NotFound desc = could not find container \"b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4\": container with ID starting with b7d9a16eab77e47f4e8c87fe6411c7938faf773f237228e4c288437e8bfff6d4 not found: ID does not exist" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.673403 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nx4j6\" (UniqueName: \"kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6\") pod \"98f33ed2-7e60-4ad4-804b-0102390369f3\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.673551 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities\") pod \"98f33ed2-7e60-4ad4-804b-0102390369f3\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.673675 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content\") pod \"98f33ed2-7e60-4ad4-804b-0102390369f3\" (UID: \"98f33ed2-7e60-4ad4-804b-0102390369f3\") " Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.676020 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities" (OuterVolumeSpecName: "utilities") pod "98f33ed2-7e60-4ad4-804b-0102390369f3" (UID: "98f33ed2-7e60-4ad4-804b-0102390369f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.680269 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6" (OuterVolumeSpecName: "kube-api-access-nx4j6") pod "98f33ed2-7e60-4ad4-804b-0102390369f3" (UID: "98f33ed2-7e60-4ad4-804b-0102390369f3"). InnerVolumeSpecName "kube-api-access-nx4j6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.766487 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98f33ed2-7e60-4ad4-804b-0102390369f3" (UID: "98f33ed2-7e60-4ad4-804b-0102390369f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.776033 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.776077 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nx4j6\" (UniqueName: \"kubernetes.io/projected/98f33ed2-7e60-4ad4-804b-0102390369f3-kube-api-access-nx4j6\") on node \"crc\" DevicePath \"\"" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.776141 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f33ed2-7e60-4ad4-804b-0102390369f3-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.938572 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:38 crc kubenswrapper[4797]: I0104 12:38:38.948578 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-25dx9"] Jan 04 12:38:39 crc kubenswrapper[4797]: I0104 12:38:39.489503 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" path="/var/lib/kubelet/pods/98f33ed2-7e60-4ad4-804b-0102390369f3/volumes" Jan 04 12:38:43 crc kubenswrapper[4797]: I0104 12:38:43.474471 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:38:43 crc kubenswrapper[4797]: E0104 12:38:43.475062 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:38:56 crc kubenswrapper[4797]: I0104 12:38:56.475138 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:38:56 crc kubenswrapper[4797]: E0104 12:38:56.476282 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:39:07 crc kubenswrapper[4797]: I0104 12:39:07.475315 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:39:07 crc kubenswrapper[4797]: E0104 12:39:07.476085 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:39:22 crc kubenswrapper[4797]: I0104 12:39:22.474615 4797 
scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:39:22 crc kubenswrapper[4797]: E0104 12:39:22.475392 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:39:35 crc kubenswrapper[4797]: I0104 12:39:35.474568 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:39:35 crc kubenswrapper[4797]: E0104 12:39:35.475704 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:39:48 crc kubenswrapper[4797]: I0104 12:39:48.474826 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:39:48 crc kubenswrapper[4797]: E0104 12:39:48.475652 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:01 crc kubenswrapper[4797]: I0104 12:40:01.477623 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:40:01 crc kubenswrapper[4797]: E0104 12:40:01.478226 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:16 crc kubenswrapper[4797]: I0104 12:40:16.474315 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:40:16 crc kubenswrapper[4797]: E0104 12:40:16.475293 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:27 crc kubenswrapper[4797]: I0104 12:40:27.475260 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:40:27 crc kubenswrapper[4797]: E0104 12:40:27.476401 4797 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:39 crc kubenswrapper[4797]: I0104 12:40:39.474787 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:40:39 crc kubenswrapper[4797]: E0104 12:40:39.475697 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.204816 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:40:46 crc kubenswrapper[4797]: E0104 12:40:46.206070 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="registry-server" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.206098 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="registry-server" Jan 04 12:40:46 crc kubenswrapper[4797]: E0104 12:40:46.206129 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="extract-content" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.206141 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="extract-content" Jan 04 12:40:46 crc kubenswrapper[4797]: E0104 12:40:46.206162 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="extract-utilities" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.206175 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="extract-utilities" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.206434 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f33ed2-7e60-4ad4-804b-0102390369f3" containerName="registry-server" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.208647 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.224434 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.373590 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.373735 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2zvr\" (UniqueName: \"kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.373973 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.475218 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.475315 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2zvr\" (UniqueName: \"kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.475353 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.475970 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.476037 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.508628 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-z2zvr\" (UniqueName: \"kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr\") pod \"redhat-operators-kwjwq\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.538972 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.765871 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:40:46 crc kubenswrapper[4797]: W0104 12:40:46.774256 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbac69687_d3f5_426d_aed8_0dc20b78ba9d.slice/crio-1e5f3774910ea3dd49d1be0f91f9edb40f627280f7dc1c0c5798e09d4a02b216 WatchSource:0}: Error finding container 1e5f3774910ea3dd49d1be0f91f9edb40f627280f7dc1c0c5798e09d4a02b216: Status 404 returned error can't find the container with id 1e5f3774910ea3dd49d1be0f91f9edb40f627280f7dc1c0c5798e09d4a02b216 Jan 04 12:40:46 crc kubenswrapper[4797]: I0104 12:40:46.856547 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerStarted","Data":"1e5f3774910ea3dd49d1be0f91f9edb40f627280f7dc1c0c5798e09d4a02b216"} Jan 04 12:40:47 crc kubenswrapper[4797]: I0104 12:40:47.868292 4797 generic.go:334] "Generic (PLEG): container finished" podID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerID="3f5b00113fbe9b02086ddbf515d26720d2c859a90d62fe72b7ce79541b3716ff" exitCode=0 Jan 04 12:40:47 crc kubenswrapper[4797]: I0104 12:40:47.868386 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerDied","Data":"3f5b00113fbe9b02086ddbf515d26720d2c859a90d62fe72b7ce79541b3716ff"} Jan 04 12:40:47 crc kubenswrapper[4797]: I0104 12:40:47.871395 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:40:49 crc kubenswrapper[4797]: I0104 12:40:49.890115 4797 generic.go:334] "Generic (PLEG): container finished" podID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerID="0fc6710988409cbba86b0af61454a8b00efa5ad4a9c66ddb2e3d1fd2454e3a2a" exitCode=0 Jan 04 12:40:49 crc kubenswrapper[4797]: I0104 12:40:49.890244 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerDied","Data":"0fc6710988409cbba86b0af61454a8b00efa5ad4a9c66ddb2e3d1fd2454e3a2a"} Jan 04 12:40:50 crc kubenswrapper[4797]: I0104 12:40:50.905670 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerStarted","Data":"91e42cf7b39e92a770171ca31ec41906af5c5cccd4e7092f46ee8da541f3becb"} Jan 04 12:40:50 crc kubenswrapper[4797]: I0104 12:40:50.950949 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kwjwq" podStartSLOduration=2.350034928 podStartE2EDuration="4.950917374s" podCreationTimestamp="2026-01-04 12:40:46 +0000 UTC" firstStartedPulling="2026-01-04 12:40:47.871193113 +0000 UTC m=+3146.728379822" lastFinishedPulling="2026-01-04 12:40:50.472075549 
+0000 UTC m=+3149.329262268" observedRunningTime="2026-01-04 12:40:50.936098906 +0000 UTC m=+3149.793285695" watchObservedRunningTime="2026-01-04 12:40:50.950917374 +0000 UTC m=+3149.808104123" Jan 04 12:40:52 crc kubenswrapper[4797]: I0104 12:40:52.476553 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:40:52 crc kubenswrapper[4797]: E0104 12:40:52.476955 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:40:56 crc kubenswrapper[4797]: I0104 12:40:56.539902 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:56 crc kubenswrapper[4797]: I0104 12:40:56.540590 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:40:57 crc kubenswrapper[4797]: I0104 12:40:57.612591 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kwjwq" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="registry-server" probeResult="failure" output=< Jan 04 12:40:57 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s Jan 04 12:40:57 crc kubenswrapper[4797]: > Jan 04 12:41:05 crc kubenswrapper[4797]: I0104 12:41:05.474841 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:41:05 crc kubenswrapper[4797]: E0104 12:41:05.476224 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:41:06 crc kubenswrapper[4797]: I0104 12:41:06.618462 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:41:06 crc kubenswrapper[4797]: I0104 12:41:06.753124 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:41:06 crc kubenswrapper[4797]: I0104 12:41:06.860545 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:41:08 crc kubenswrapper[4797]: I0104 12:41:08.067961 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kwjwq" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="registry-server" containerID="cri-o://91e42cf7b39e92a770171ca31ec41906af5c5cccd4e7092f46ee8da541f3becb" gracePeriod=2 Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.081650 4797 generic.go:334] "Generic (PLEG): container finished" podID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerID="91e42cf7b39e92a770171ca31ec41906af5c5cccd4e7092f46ee8da541f3becb" exitCode=0
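The startup-probe failure above shows the prober timing out against ":50051", the gRPC port the registry-server container listens on; marketplace catalog pods of this kind are typically probed with a grpc_health_probe-style check. Below is a sketch of an equivalent standalone check in Go using the standard gRPC health-checking service; the address and one-second timeout are taken from the probe output, while the exact probe binary the pod runs is an assumption.

// Sketch of a grpc_health_probe-style startup check. Assumes the target
// serves the standard grpc.health.v1 service; address/timeout come from
// the probe output above, everything else is illustrative.
package main

import (
	"context"
	"fmt"
	"os"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	conn, err := grpc.DialContext(ctx, "localhost:50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithBlock())
	if err != nil {
		// Mirrors the failure above: timeout: failed to connect service ":50051" within 1s
		fmt.Fprintln(os.Stderr, "timeout: failed to connect service \":50051\" within 1s")
		os.Exit(1)
	}
	defer conn.Close()
	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	if err != nil {
		fmt.Fprintln(os.Stderr, "health check failed:", err)
		os.Exit(1)
	}
	fmt.Println("status:", resp.GetStatus())
}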
Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.081779 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerDied","Data":"91e42cf7b39e92a770171ca31ec41906af5c5cccd4e7092f46ee8da541f3becb"} Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.626965 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.732817 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content\") pod \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.732913 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2zvr\" (UniqueName: \"kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr\") pod \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.732960 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities\") pod \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\" (UID: \"bac69687-d3f5-426d-aed8-0dc20b78ba9d\") " Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.734744 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities" (OuterVolumeSpecName: "utilities") pod "bac69687-d3f5-426d-aed8-0dc20b78ba9d" (UID: "bac69687-d3f5-426d-aed8-0dc20b78ba9d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.742467 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr" (OuterVolumeSpecName: "kube-api-access-z2zvr") pod "bac69687-d3f5-426d-aed8-0dc20b78ba9d" (UID: "bac69687-d3f5-426d-aed8-0dc20b78ba9d"). InnerVolumeSpecName "kube-api-access-z2zvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.834600 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2zvr\" (UniqueName: \"kubernetes.io/projected/bac69687-d3f5-426d-aed8-0dc20b78ba9d-kube-api-access-z2zvr\") on node \"crc\" DevicePath \"\"" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.834709 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.880773 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bac69687-d3f5-426d-aed8-0dc20b78ba9d" (UID: "bac69687-d3f5-426d-aed8-0dc20b78ba9d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:41:09 crc kubenswrapper[4797]: I0104 12:41:09.936136 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac69687-d3f5-426d-aed8-0dc20b78ba9d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.096191 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwjwq" event={"ID":"bac69687-d3f5-426d-aed8-0dc20b78ba9d","Type":"ContainerDied","Data":"1e5f3774910ea3dd49d1be0f91f9edb40f627280f7dc1c0c5798e09d4a02b216"} Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.096250 4797 scope.go:117] "RemoveContainer" containerID="91e42cf7b39e92a770171ca31ec41906af5c5cccd4e7092f46ee8da541f3becb" Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.096293 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwjwq" Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.142290 4797 scope.go:117] "RemoveContainer" containerID="0fc6710988409cbba86b0af61454a8b00efa5ad4a9c66ddb2e3d1fd2454e3a2a" Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.161128 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.170801 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kwjwq"] Jan 04 12:41:10 crc kubenswrapper[4797]: I0104 12:41:10.188339 4797 scope.go:117] "RemoveContainer" containerID="3f5b00113fbe9b02086ddbf515d26720d2c859a90d62fe72b7ce79541b3716ff" Jan 04 12:41:11 crc kubenswrapper[4797]: I0104 12:41:11.489028 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" path="/var/lib/kubelet/pods/bac69687-d3f5-426d-aed8-0dc20b78ba9d/volumes" Jan 04 12:41:16 crc kubenswrapper[4797]: I0104 12:41:16.475107 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:41:16 crc kubenswrapper[4797]: E0104 12:41:16.475790 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:41:28 crc kubenswrapper[4797]: I0104 12:41:28.474457 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:41:28 crc kubenswrapper[4797]: E0104 12:41:28.475252 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:41:41 crc kubenswrapper[4797]: I0104 12:41:41.480080 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:41:41 crc kubenswrapper[4797]: E0104 12:41:41.480889 
4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:41:55 crc kubenswrapper[4797]: I0104 12:41:55.474584 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:41:55 crc kubenswrapper[4797]: E0104 12:41:55.475363 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:42:09 crc kubenswrapper[4797]: I0104 12:42:09.474975 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:42:09 crc kubenswrapper[4797]: E0104 12:42:09.476143 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:42:23 crc kubenswrapper[4797]: I0104 12:42:23.474405 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:42:23 crc kubenswrapper[4797]: E0104 12:42:23.475205 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:42:34 crc kubenswrapper[4797]: I0104 12:42:34.474665 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:42:34 crc kubenswrapper[4797]: E0104 12:42:34.475838 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:42:49 crc kubenswrapper[4797]: I0104 12:42:49.474758 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a" Jan 04 12:42:49 crc kubenswrapper[4797]: E0104 12:42:49.476353 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6"
Jan 04 12:43:04 crc kubenswrapper[4797]: I0104 12:43:04.474106 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"
Jan 04 12:43:04 crc kubenswrapper[4797]: E0104 12:43:04.475250 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6"
Jan 04 12:43:16 crc kubenswrapper[4797]: I0104 12:43:16.473810 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"
Jan 04 12:43:16 crc kubenswrapper[4797]: E0104 12:43:16.474804 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6"
Jan 04 12:43:27 crc kubenswrapper[4797]: I0104 12:43:27.474483 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"
Jan 04 12:43:28 crc kubenswrapper[4797]: I0104 12:43:28.643591 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068"}
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.179402 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"]
Jan 04 12:45:00 crc kubenswrapper[4797]: E0104 12:45:00.182491 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="extract-utilities"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.182736 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="extract-utilities"
Jan 04 12:45:00 crc kubenswrapper[4797]: E0104 12:45:00.182938 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="extract-content"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.183157 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="extract-content"
Jan 04 12:45:00 crc kubenswrapper[4797]: E0104 12:45:00.183342 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.183466 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.183971 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac69687-d3f5-426d-aed8-0dc20b78ba9d" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.185351 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.188507 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"]
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.192706 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.193069 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.325935 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.326028 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzk8l\" (UniqueName: \"kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.326343 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.427639 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.427707 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.427793 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzk8l\" (UniqueName: \"kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.429109 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.436719 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.444227 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzk8l\" (UniqueName: \"kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l\") pod \"collect-profiles-29458845-4dljt\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.515509 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:00 crc kubenswrapper[4797]: I0104 12:45:00.947345 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"]
Jan 04 12:45:01 crc kubenswrapper[4797]: I0104 12:45:01.476618 4797 generic.go:334] "Generic (PLEG): container finished" podID="02139d0a-c4e4-4731-b375-1b0009e65056" containerID="6838837d8cd0886b4add59c42e38911da6bbffee64e4bcd7b37ee3ba9f7a7159" exitCode=0
Jan 04 12:45:01 crc kubenswrapper[4797]: I0104 12:45:01.481918 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt" event={"ID":"02139d0a-c4e4-4731-b375-1b0009e65056","Type":"ContainerDied","Data":"6838837d8cd0886b4add59c42e38911da6bbffee64e4bcd7b37ee3ba9f7a7159"}
Jan 04 12:45:01 crc kubenswrapper[4797]: I0104 12:45:01.481961 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt" event={"ID":"02139d0a-c4e4-4731-b375-1b0009e65056","Type":"ContainerStarted","Data":"005950a11aa09cfe9fe3c512dadaa675d7cc490e90b53cfe16d6c530014f3991"}
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.835701 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.974413 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzk8l\" (UniqueName: \"kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l\") pod \"02139d0a-c4e4-4731-b375-1b0009e65056\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") "
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.974494 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume\") pod \"02139d0a-c4e4-4731-b375-1b0009e65056\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") "
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.974726 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume\") pod \"02139d0a-c4e4-4731-b375-1b0009e65056\" (UID: \"02139d0a-c4e4-4731-b375-1b0009e65056\") "
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.975869 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume" (OuterVolumeSpecName: "config-volume") pod "02139d0a-c4e4-4731-b375-1b0009e65056" (UID: "02139d0a-c4e4-4731-b375-1b0009e65056"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.984400 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "02139d0a-c4e4-4731-b375-1b0009e65056" (UID: "02139d0a-c4e4-4731-b375-1b0009e65056"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:45:02 crc kubenswrapper[4797]: I0104 12:45:02.984565 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l" (OuterVolumeSpecName: "kube-api-access-pzk8l") pod "02139d0a-c4e4-4731-b375-1b0009e65056" (UID: "02139d0a-c4e4-4731-b375-1b0009e65056"). InnerVolumeSpecName "kube-api-access-pzk8l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.076358 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02139d0a-c4e4-4731-b375-1b0009e65056-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.076588 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzk8l\" (UniqueName: \"kubernetes.io/projected/02139d0a-c4e4-4731-b375-1b0009e65056-kube-api-access-pzk8l\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.076709 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02139d0a-c4e4-4731-b375-1b0009e65056-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.498895 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt" event={"ID":"02139d0a-c4e4-4731-b375-1b0009e65056","Type":"ContainerDied","Data":"005950a11aa09cfe9fe3c512dadaa675d7cc490e90b53cfe16d6c530014f3991"}
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.498934 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="005950a11aa09cfe9fe3c512dadaa675d7cc490e90b53cfe16d6c530014f3991"
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.499064 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-4dljt"
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.936742 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"]
Jan 04 12:45:03 crc kubenswrapper[4797]: I0104 12:45:03.941972 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-6hplj"]
Jan 04 12:45:05 crc kubenswrapper[4797]: I0104 12:45:05.488303 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6916df2d-860b-434d-8949-d624d7d47b9b" path="/var/lib/kubelet/pods/6916df2d-860b-434d-8949-d624d7d47b9b/volumes"
Jan 04 12:45:26 crc kubenswrapper[4797]: I0104 12:45:26.773218 4797 scope.go:117] "RemoveContainer" containerID="078cdd04b1cb894e37a2ab7c8703bf99faa029c2fc7efa543e45f5c593c405f9"
Jan 04 12:45:49 crc kubenswrapper[4797]: I0104 12:45:49.493469 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:45:49 crc kubenswrapper[4797]: I0104 12:45:49.494065 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:46:19 crc kubenswrapper[4797]: I0104 12:46:19.493091 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:46:19 crc kubenswrapper[4797]: I0104 12:46:19.494003 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.323772 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:20 crc kubenswrapper[4797]: E0104 12:46:20.324256 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02139d0a-c4e4-4731-b375-1b0009e65056" containerName="collect-profiles"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.324279 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="02139d0a-c4e4-4731-b375-1b0009e65056" containerName="collect-profiles"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.324501 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="02139d0a-c4e4-4731-b375-1b0009e65056" containerName="collect-profiles"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.325779 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.352357 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.453628 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.453757 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm9qj\" (UniqueName: \"kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.453784 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.555094 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.555703 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.556535 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm9qj\" (UniqueName: \"kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.556606 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.557408 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.580557 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm9qj\" (UniqueName: \"kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj\") pod \"certified-operators-tpsz4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") " pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:20 crc kubenswrapper[4797]: I0104 12:46:20.651186 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:21 crc kubenswrapper[4797]: I0104 12:46:21.274969 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.181245 4797 generic.go:334] "Generic (PLEG): container finished" podID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerID="1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05" exitCode=0
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.181326 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerDied","Data":"1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05"}
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.181686 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerStarted","Data":"f4f545e7d7244d0b839e2f5a23206d874b124f2627dea3fa0a9961b7e9f49b74"}
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.184074 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.726117 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.728636 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.744313 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.788331 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.788431 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.889781 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.890301 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5txwz\" (UniqueName: \"kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.890378 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.890377 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.890674 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:22 crc kubenswrapper[4797]: I0104 12:46:22.992286 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5txwz\" (UniqueName: \"kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:23 crc kubenswrapper[4797]: I0104 12:46:23.028372 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5txwz\" (UniqueName: \"kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz\") pod \"redhat-marketplace-2sxrc\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") " pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:23 crc kubenswrapper[4797]: I0104 12:46:23.056507 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:23 crc kubenswrapper[4797]: I0104 12:46:23.223453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerStarted","Data":"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"}
Jan 04 12:46:23 crc kubenswrapper[4797]: I0104 12:46:23.363884 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:24 crc kubenswrapper[4797]: I0104 12:46:24.238072 4797 generic.go:334] "Generic (PLEG): container finished" podID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerID="21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367" exitCode=0
Jan 04 12:46:24 crc kubenswrapper[4797]: I0104 12:46:24.238137 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerDied","Data":"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"}
Jan 04 12:46:24 crc kubenswrapper[4797]: I0104 12:46:24.243936 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerDied","Data":"47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8"}
Jan 04 12:46:24 crc kubenswrapper[4797]: I0104 12:46:24.243550 4797 generic.go:334] "Generic (PLEG): container finished" podID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerID="47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8" exitCode=0
Jan 04 12:46:24 crc kubenswrapper[4797]: I0104 12:46:24.244214 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerStarted","Data":"3d06b2d044ac100cb7d41b3166b6654ba0c24f387cc934a2ce1c43d4b42c765e"}
Jan 04 12:46:25 crc kubenswrapper[4797]: I0104 12:46:25.256620 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerStarted","Data":"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"}
Jan 04 12:46:25 crc kubenswrapper[4797]: I0104 12:46:25.259317 4797 generic.go:334] "Generic (PLEG): container finished" podID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerID="217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09" exitCode=0
Jan 04 12:46:25 crc kubenswrapper[4797]: I0104 12:46:25.259373 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerDied","Data":"217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09"}
Jan 04 12:46:25 crc kubenswrapper[4797]: I0104 12:46:25.277710 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tpsz4" podStartSLOduration=2.741041172 podStartE2EDuration="5.27769239s" podCreationTimestamp="2026-01-04 12:46:20 +0000 UTC" firstStartedPulling="2026-01-04 12:46:22.183589285 +0000 UTC m=+3481.040776024" lastFinishedPulling="2026-01-04 12:46:24.720240503 +0000 UTC m=+3483.577427242" observedRunningTime="2026-01-04 12:46:25.27502012 +0000 UTC m=+3484.132206869" watchObservedRunningTime="2026-01-04 12:46:25.27769239 +0000 UTC m=+3484.134879099"
Jan 04 12:46:26 crc kubenswrapper[4797]: I0104 12:46:26.268074 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerStarted","Data":"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"}
Jan 04 12:46:26 crc kubenswrapper[4797]: I0104 12:46:26.292550 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2sxrc" podStartSLOduration=2.799162263 podStartE2EDuration="4.292531573s" podCreationTimestamp="2026-01-04 12:46:22 +0000 UTC" firstStartedPulling="2026-01-04 12:46:24.247328702 +0000 UTC m=+3483.104515451" lastFinishedPulling="2026-01-04 12:46:25.740698052 +0000 UTC m=+3484.597884761" observedRunningTime="2026-01-04 12:46:26.290437529 +0000 UTC m=+3485.147624238" watchObservedRunningTime="2026-01-04 12:46:26.292531573 +0000 UTC m=+3485.149718282"
Jan 04 12:46:30 crc kubenswrapper[4797]: I0104 12:46:30.651367 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:30 crc kubenswrapper[4797]: I0104 12:46:30.651669 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:30 crc kubenswrapper[4797]: I0104 12:46:30.711842 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:31 crc kubenswrapper[4797]: I0104 12:46:31.394736 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:31 crc kubenswrapper[4797]: I0104 12:46:31.506349 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.056980 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.057121 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.107387 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.325093 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tpsz4" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="registry-server" containerID="cri-o://5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f" gracePeriod=2
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.379359 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:33 crc kubenswrapper[4797]: I0104 12:46:33.912670 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.332428 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.333917 4797 generic.go:334] "Generic (PLEG): container finished" podID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerID="5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f" exitCode=0
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.334003 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerDied","Data":"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"}
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.334067 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tpsz4" event={"ID":"f27fea96-9a08-41ff-a655-c3671d36f9a4","Type":"ContainerDied","Data":"f4f545e7d7244d0b839e2f5a23206d874b124f2627dea3fa0a9961b7e9f49b74"}
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.334097 4797 scope.go:117] "RemoveContainer" containerID="5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.364417 4797 scope.go:117] "RemoveContainer" containerID="21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.388865 4797 scope.go:117] "RemoveContainer" containerID="1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.445044 4797 scope.go:117] "RemoveContainer" containerID="5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"
Jan 04 12:46:34 crc kubenswrapper[4797]: E0104 12:46:34.446417 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f\": container with ID starting with 5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f not found: ID does not exist" containerID="5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.446449 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f"} err="failed to get container status \"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f\": rpc error: code = NotFound desc = could not find container \"5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f\": container with ID starting with 5210c7d066ee06df2b08e97df4eb904f491d40891774cf218dd35112ea58534f not found: ID does not exist"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.446471 4797 scope.go:117] "RemoveContainer" containerID="21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"
Jan 04 12:46:34 crc kubenswrapper[4797]: E0104 12:46:34.452454 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367\": container with ID starting with 21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367 not found: ID does not exist" containerID="21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.452495 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367"} err="failed to get container status \"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367\": rpc error: code = NotFound desc = could not find container \"21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367\": container with ID starting with 21f3b854eebb28ff926a7852df40dd71857337b1f73fe3af9c167651d7058367 not found: ID does not exist"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.452521 4797 scope.go:117] "RemoveContainer" containerID="1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05"
Jan 04 12:46:34 crc kubenswrapper[4797]: E0104 12:46:34.456367 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05\": container with ID starting with 1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05 not found: ID does not exist" containerID="1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.456401 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05"} err="failed to get container status \"1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05\": rpc error: code = NotFound desc = could not find container \"1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05\": container with ID starting with 1bd0f05722bed51c7b362b687dd415055901bbb48c6339cccd47ca599ef23b05 not found: ID does not exist"
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.465509 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content\") pod \"f27fea96-9a08-41ff-a655-c3671d36f9a4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") "
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.465622 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm9qj\" (UniqueName: \"kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj\") pod \"f27fea96-9a08-41ff-a655-c3671d36f9a4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") "
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.465666 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities\") pod \"f27fea96-9a08-41ff-a655-c3671d36f9a4\" (UID: \"f27fea96-9a08-41ff-a655-c3671d36f9a4\") "
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.484241 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities" (OuterVolumeSpecName: "utilities") pod "f27fea96-9a08-41ff-a655-c3671d36f9a4" (UID: "f27fea96-9a08-41ff-a655-c3671d36f9a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.496304 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj" (OuterVolumeSpecName: "kube-api-access-xm9qj") pod "f27fea96-9a08-41ff-a655-c3671d36f9a4" (UID: "f27fea96-9a08-41ff-a655-c3671d36f9a4"). InnerVolumeSpecName "kube-api-access-xm9qj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.535471 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f27fea96-9a08-41ff-a655-c3671d36f9a4" (UID: "f27fea96-9a08-41ff-a655-c3671d36f9a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.566838 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.566872 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27fea96-9a08-41ff-a655-c3671d36f9a4-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:34 crc kubenswrapper[4797]: I0104 12:46:34.566884 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm9qj\" (UniqueName: \"kubernetes.io/projected/f27fea96-9a08-41ff-a655-c3671d36f9a4-kube-api-access-xm9qj\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:35 crc kubenswrapper[4797]: I0104 12:46:35.342886 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tpsz4"
Jan 04 12:46:35 crc kubenswrapper[4797]: I0104 12:46:35.343118 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2sxrc" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="registry-server" containerID="cri-o://c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc" gracePeriod=2
Jan 04 12:46:35 crc kubenswrapper[4797]: I0104 12:46:35.403625 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:35 crc kubenswrapper[4797]: I0104 12:46:35.413135 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tpsz4"]
Jan 04 12:46:35 crc kubenswrapper[4797]: I0104 12:46:35.491220 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" path="/var/lib/kubelet/pods/f27fea96-9a08-41ff-a655-c3671d36f9a4/volumes"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.273922 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.354148 4797 generic.go:334] "Generic (PLEG): container finished" podID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerID="c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc" exitCode=0
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.354174 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sxrc"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.354167 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerDied","Data":"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"}
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.354246 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sxrc" event={"ID":"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08","Type":"ContainerDied","Data":"3d06b2d044ac100cb7d41b3166b6654ba0c24f387cc934a2ce1c43d4b42c765e"}
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.354268 4797 scope.go:117] "RemoveContainer" containerID="c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.375297 4797 scope.go:117] "RemoveContainer" containerID="217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.394356 4797 scope.go:117] "RemoveContainer" containerID="47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.401506 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5txwz\" (UniqueName: \"kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz\") pod \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") "
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.401589 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content\") pod \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") "
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.401742 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities\") pod \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\" (UID: \"99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08\") "
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.403453 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities" (OuterVolumeSpecName: "utilities") pod "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" (UID: "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.411122 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz" (OuterVolumeSpecName: "kube-api-access-5txwz") pod "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" (UID: "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08"). InnerVolumeSpecName "kube-api-access-5txwz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.437611 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" (UID: "99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.455323 4797 scope.go:117] "RemoveContainer" containerID="c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"
Jan 04 12:46:36 crc kubenswrapper[4797]: E0104 12:46:36.455933 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc\": container with ID starting with c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc not found: ID does not exist" containerID="c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.456051 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc"} err="failed to get container status \"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc\": rpc error: code = NotFound desc = could not find container \"c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc\": container with ID starting with c5dcc33af78266b9ed833d5fceb08a7536c57d7209bfb881890d30e453d4c9fc not found: ID does not exist"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.456102 4797 scope.go:117] "RemoveContainer" containerID="217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09"
Jan 04 12:46:36 crc kubenswrapper[4797]: E0104 12:46:36.456699 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09\": container with ID starting with 217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09 not found: ID does not exist" containerID="217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.456742 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09"} err="failed to get container status \"217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09\": rpc error: code = NotFound desc = could not find container \"217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09\": container with ID starting with 217f3854925068e2d7a1b9fe16fc390a1d98ea5b20cc881bce225d9e032a4b09 not found: ID does not exist"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.456769 4797 scope.go:117] "RemoveContainer" containerID="47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8"
Jan 04 12:46:36 crc kubenswrapper[4797]: E0104 12:46:36.457196 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8\": container with ID starting with 47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8 not found: ID does not exist" containerID="47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.457264 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8"} err="failed to get container status \"47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8\": rpc error: code = NotFound desc = could not find container \"47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8\": container with ID starting with 47d7b923407a1aa642c56f5a49141fe9663bf2a06f05ad87f429f369251c4fd8 not found: ID does not exist"
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.503677 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.503741 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.503763 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5txwz\" (UniqueName: \"kubernetes.io/projected/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08-kube-api-access-5txwz\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.708344 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:36 crc kubenswrapper[4797]: I0104 12:46:36.715626 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sxrc"]
Jan 04 12:46:37 crc kubenswrapper[4797]: I0104 12:46:37.485953 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" path="/var/lib/kubelet/pods/99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08/volumes"
Jan 04 12:46:49 crc kubenswrapper[4797]: I0104 12:46:49.492974 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:46:49 crc kubenswrapper[4797]: I0104 12:46:49.493699 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:46:49 crc kubenswrapper[4797]: I0104 12:46:49.500064 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 12:46:49 crc kubenswrapper[4797]: I0104 12:46:49.501153 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 12:46:49 crc kubenswrapper[4797]: I0104 12:46:49.501473 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068" gracePeriod=600
Jan 04 12:46:50 crc kubenswrapper[4797]: I0104 12:46:50.494095 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068" exitCode=0
Jan 04 12:46:50 crc kubenswrapper[4797]: I0104 12:46:50.494166 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068"}
Jan 04 12:46:50 crc kubenswrapper[4797]: I0104 12:46:50.494511 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8"}
Jan 04 12:46:50 crc kubenswrapper[4797]: I0104 12:46:50.494550 4797 scope.go:117] "RemoveContainer" containerID="18159de6b104500cc29b08a0e31428f47025aa7038a71ee0703c851f1b759b1a"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.250292 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vkkpt"]
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251404 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251429 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251461 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="extract-content"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251474 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="extract-content"
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251501 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="extract-content"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251517 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="extract-content"
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251548 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="extract-utilities"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251565 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="extract-utilities"
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251618 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251637 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: E0104 12:48:34.251657 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="extract-utilities"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251670 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="extract-utilities"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.251936 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="99ec2f5c-8294-4bcf-9e5f-ac9ea83b8f08" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.252190 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="f27fea96-9a08-41ff-a655-c3671d36f9a4" containerName="registry-server"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.254222 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.270777 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vkkpt"]
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.329183 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g86ln\" (UniqueName: \"kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.329338 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.329407 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.431110 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.431183 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.431288 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g86ln\" (UniqueName: \"kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.431887 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.432025 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.455139 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g86ln\" (UniqueName: \"kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln\") pod \"community-operators-vkkpt\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") " pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.571224 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:34 crc kubenswrapper[4797]: I0104 12:48:34.861220 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vkkpt"]
Jan 04 12:48:35 crc kubenswrapper[4797]: I0104 12:48:35.389360 4797 generic.go:334] "Generic (PLEG): container finished" podID="631b3022-062e-4bcd-99c5-b11c13d21999" containerID="3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0" exitCode=0
Jan 04 12:48:35 crc kubenswrapper[4797]: I0104 12:48:35.389427 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerDied","Data":"3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0"}
Jan 04 12:48:35 crc kubenswrapper[4797]: I0104 12:48:35.389465 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerStarted","Data":"66ccc39fe40989950e619aeaab229328b9de497c015bc7afe575b988d9931154"}
Jan 04 12:48:36 crc kubenswrapper[4797]: I0104 12:48:36.404724 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerStarted","Data":"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293"}
Jan 04 12:48:37 crc kubenswrapper[4797]: I0104 12:48:37.417264 4797 generic.go:334] "Generic (PLEG): container finished" podID="631b3022-062e-4bcd-99c5-b11c13d21999" containerID="90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293" exitCode=0
Jan 04 12:48:37 crc kubenswrapper[4797]: I0104 12:48:37.417399 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerDied","Data":"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293"}
Jan 04 12:48:38 crc kubenswrapper[4797]: I0104 12:48:38.428532 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerStarted","Data":"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c"}
Jan 04 12:48:38 crc kubenswrapper[4797]: I0104 12:48:38.457155 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vkkpt" podStartSLOduration=2.069780405 podStartE2EDuration="4.457098365s" podCreationTimestamp="2026-01-04 12:48:34 +0000 UTC" firstStartedPulling="2026-01-04 12:48:35.391618487 +0000 UTC m=+3614.248805236" lastFinishedPulling="2026-01-04 12:48:37.778936447 +0000 UTC m=+3616.636123196" observedRunningTime="2026-01-04 12:48:38.454739423 +0000 UTC m=+3617.311926152" watchObservedRunningTime="2026-01-04 12:48:38.457098365 +0000 UTC m=+3617.314285084"
Jan 04 12:48:44 crc kubenswrapper[4797]: I0104 12:48:44.572260 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:44 crc kubenswrapper[4797]: I0104 12:48:44.573073 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:44 crc kubenswrapper[4797]: I0104 12:48:44.655617 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:45 crc kubenswrapper[4797]: I0104 12:48:45.567118 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:45 crc kubenswrapper[4797]: I0104 12:48:45.632193 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vkkpt"]
Jan 04 12:48:47 crc kubenswrapper[4797]: I0104 12:48:47.503647 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vkkpt" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="registry-server" containerID="cri-o://a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c" gracePeriod=2
Jan 04 12:48:47 crc kubenswrapper[4797]: I0104 12:48:47.993066 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vkkpt"
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.139557 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities\") pod \"631b3022-062e-4bcd-99c5-b11c13d21999\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") "
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.139630 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content\") pod \"631b3022-062e-4bcd-99c5-b11c13d21999\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") "
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.139741 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g86ln\" (UniqueName: \"kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln\") pod \"631b3022-062e-4bcd-99c5-b11c13d21999\" (UID: \"631b3022-062e-4bcd-99c5-b11c13d21999\") "
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.141160 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities" (OuterVolumeSpecName: "utilities") pod "631b3022-062e-4bcd-99c5-b11c13d21999" (UID: "631b3022-062e-4bcd-99c5-b11c13d21999"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.148216 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln" (OuterVolumeSpecName: "kube-api-access-g86ln") pod "631b3022-062e-4bcd-99c5-b11c13d21999" (UID: "631b3022-062e-4bcd-99c5-b11c13d21999"). InnerVolumeSpecName "kube-api-access-g86ln". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.213657 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "631b3022-062e-4bcd-99c5-b11c13d21999" (UID: "631b3022-062e-4bcd-99c5-b11c13d21999"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.241650 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.241680 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/631b3022-062e-4bcd-99c5-b11c13d21999-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.241694 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g86ln\" (UniqueName: \"kubernetes.io/projected/631b3022-062e-4bcd-99c5-b11c13d21999-kube-api-access-g86ln\") on node \"crc\" DevicePath \"\"" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.513941 4797 generic.go:334] "Generic (PLEG): container finished" podID="631b3022-062e-4bcd-99c5-b11c13d21999" containerID="a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c" exitCode=0 Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.514405 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerDied","Data":"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c"} Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.514457 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vkkpt" event={"ID":"631b3022-062e-4bcd-99c5-b11c13d21999","Type":"ContainerDied","Data":"66ccc39fe40989950e619aeaab229328b9de497c015bc7afe575b988d9931154"} Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.514490 4797 scope.go:117] "RemoveContainer" containerID="a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.514511 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vkkpt" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.597957 4797 scope.go:117] "RemoveContainer" containerID="90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.619086 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vkkpt"] Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.626393 4797 scope.go:117] "RemoveContainer" containerID="3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.626953 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vkkpt"] Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.651212 4797 scope.go:117] "RemoveContainer" containerID="a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c" Jan 04 12:48:48 crc kubenswrapper[4797]: E0104 12:48:48.651827 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c\": container with ID starting with a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c not found: ID does not exist" containerID="a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.651917 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c"} err="failed to get container status \"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c\": rpc error: code = NotFound desc = could not find container \"a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c\": container with ID starting with a13f43e0d5be63ed526d117d0dd64d5baa5cec41c13a9a959a73ce6a7ff1a93c not found: ID does not exist" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.651973 4797 scope.go:117] "RemoveContainer" containerID="90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293" Jan 04 12:48:48 crc kubenswrapper[4797]: E0104 12:48:48.652270 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293\": container with ID starting with 90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293 not found: ID does not exist" containerID="90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.652309 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293"} err="failed to get container status \"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293\": rpc error: code = NotFound desc = could not find container \"90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293\": container with ID starting with 90f42e5fe510668ba3ecfac9f06099adefc2fa6b09a8e915f8a7a31a656d2293 not found: ID does not exist" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.652328 4797 scope.go:117] "RemoveContainer" containerID="3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0" Jan 04 12:48:48 crc kubenswrapper[4797]: E0104 12:48:48.652781 4797 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0\": container with ID starting with 3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0 not found: ID does not exist" containerID="3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0" Jan 04 12:48:48 crc kubenswrapper[4797]: I0104 12:48:48.652808 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0"} err="failed to get container status \"3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0\": rpc error: code = NotFound desc = could not find container \"3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0\": container with ID starting with 3250ffa6e8f9bf6eec56be5bd8e4009492fb00fb9abe8aad665fe0ae256733e0 not found: ID does not exist" Jan 04 12:48:49 crc kubenswrapper[4797]: I0104 12:48:49.485104 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" path="/var/lib/kubelet/pods/631b3022-062e-4bcd-99c5-b11c13d21999/volumes" Jan 04 12:48:49 crc kubenswrapper[4797]: I0104 12:48:49.493241 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:48:49 crc kubenswrapper[4797]: I0104 12:48:49.493301 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:49:19 crc kubenswrapper[4797]: I0104 12:49:19.493858 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:49:19 crc kubenswrapper[4797]: I0104 12:49:19.495307 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:49:49 crc kubenswrapper[4797]: I0104 12:49:49.493325 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:49:49 crc kubenswrapper[4797]: I0104 12:49:49.494248 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:49:49 crc kubenswrapper[4797]: I0104 12:49:49.494317 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:49:49 crc kubenswrapper[4797]: I0104 12:49:49.495279 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:49:49 crc kubenswrapper[4797]: I0104 12:49:49.495396 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" gracePeriod=600 Jan 04 12:49:49 crc kubenswrapper[4797]: E0104 12:49:49.624383 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:49:50 crc kubenswrapper[4797]: I0104 12:49:50.155424 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" exitCode=0 Jan 04 12:49:50 crc kubenswrapper[4797]: I0104 12:49:50.155509 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8"} Jan 04 12:49:50 crc kubenswrapper[4797]: I0104 12:49:50.155921 4797 scope.go:117] "RemoveContainer" containerID="6478250db58a1615c735960e7ee8374d3b63513c2d78d4d554d80730aad1c068" Jan 04 12:49:50 crc kubenswrapper[4797]: I0104 12:49:50.156736 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:49:50 crc kubenswrapper[4797]: E0104 12:49:50.157291 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:50:04 crc kubenswrapper[4797]: I0104 12:50:04.473963 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:50:04 crc kubenswrapper[4797]: E0104 12:50:04.474925 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:50:15 crc 
kubenswrapper[4797]: I0104 12:50:15.474710 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:50:15 crc kubenswrapper[4797]: E0104 12:50:15.475623 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:50:30 crc kubenswrapper[4797]: I0104 12:50:30.474485 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:50:30 crc kubenswrapper[4797]: E0104 12:50:30.475508 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:50:41 crc kubenswrapper[4797]: I0104 12:50:41.481542 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:50:41 crc kubenswrapper[4797]: E0104 12:50:41.483234 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:50:56 crc kubenswrapper[4797]: I0104 12:50:56.475311 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:50:56 crc kubenswrapper[4797]: E0104 12:50:56.476531 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:51:11 crc kubenswrapper[4797]: I0104 12:51:11.481193 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:51:11 crc kubenswrapper[4797]: E0104 12:51:11.482241 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:51:24 crc kubenswrapper[4797]: I0104 12:51:24.474721 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:51:24 crc 
kubenswrapper[4797]: E0104 12:51:24.476114 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:51:35 crc kubenswrapper[4797]: I0104 12:51:35.474348 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:51:35 crc kubenswrapper[4797]: E0104 12:51:35.474955 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:51:47 crc kubenswrapper[4797]: I0104 12:51:47.474544 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:51:47 crc kubenswrapper[4797]: E0104 12:51:47.475693 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:51:59 crc kubenswrapper[4797]: I0104 12:51:59.474135 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:51:59 crc kubenswrapper[4797]: E0104 12:51:59.475100 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:52:12 crc kubenswrapper[4797]: I0104 12:52:12.474630 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:52:12 crc kubenswrapper[4797]: E0104 12:52:12.475700 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:52:27 crc kubenswrapper[4797]: I0104 12:52:27.474964 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:52:27 crc kubenswrapper[4797]: E0104 12:52:27.476156 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:52:38 crc kubenswrapper[4797]: I0104 12:52:38.474433 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:52:38 crc kubenswrapper[4797]: E0104 12:52:38.475810 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:52:50 crc kubenswrapper[4797]: I0104 12:52:50.474585 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:52:50 crc kubenswrapper[4797]: E0104 12:52:50.475768 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:53:03 crc kubenswrapper[4797]: I0104 12:53:03.474132 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:53:03 crc kubenswrapper[4797]: E0104 12:53:03.474771 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:53:15 crc kubenswrapper[4797]: I0104 12:53:15.474623 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:53:15 crc kubenswrapper[4797]: E0104 12:53:15.475615 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:53:30 crc kubenswrapper[4797]: I0104 12:53:30.474156 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:53:30 crc kubenswrapper[4797]: E0104 12:53:30.475269 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:53:42 crc kubenswrapper[4797]: I0104 12:53:42.474460 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:53:42 crc kubenswrapper[4797]: E0104 12:53:42.475206 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:53:57 crc kubenswrapper[4797]: I0104 12:53:57.474964 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:53:57 crc kubenswrapper[4797]: E0104 12:53:57.476227 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:54:11 crc kubenswrapper[4797]: I0104 12:54:11.480344 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:54:11 crc kubenswrapper[4797]: E0104 12:54:11.481145 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:54:22 crc kubenswrapper[4797]: I0104 12:54:22.475251 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:54:22 crc kubenswrapper[4797]: E0104 12:54:22.476564 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:54:33 crc kubenswrapper[4797]: I0104 12:54:33.473958 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:54:33 crc kubenswrapper[4797]: E0104 12:54:33.475369 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:54:47 crc kubenswrapper[4797]: I0104 12:54:47.474546 4797 
scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:54:47 crc kubenswrapper[4797]: E0104 12:54:47.475460 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 12:54:58 crc kubenswrapper[4797]: I0104 12:54:58.474919 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 12:54:59 crc kubenswrapper[4797]: I0104 12:54:59.310422 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca"} Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.531854 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:17 crc kubenswrapper[4797]: E0104 12:56:17.532770 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="registry-server" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.532784 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="registry-server" Jan 04 12:56:17 crc kubenswrapper[4797]: E0104 12:56:17.532800 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="extract-utilities" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.532806 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="extract-utilities" Jan 04 12:56:17 crc kubenswrapper[4797]: E0104 12:56:17.532823 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="extract-content" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.532830 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="extract-content" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.532958 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="631b3022-062e-4bcd-99c5-b11c13d21999" containerName="registry-server" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.533922 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.558264 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.703708 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.703814 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd7gz\" (UniqueName: \"kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.703909 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.805589 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.805706 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd7gz\" (UniqueName: \"kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.805753 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.806308 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.806648 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.834367 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fd7gz\" (UniqueName: \"kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz\") pod \"redhat-operators-zq4r2\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:17 crc kubenswrapper[4797]: I0104 12:56:17.886823 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:18 crc kubenswrapper[4797]: I0104 12:56:18.313247 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:18 crc kubenswrapper[4797]: I0104 12:56:18.987370 4797 generic.go:334] "Generic (PLEG): container finished" podID="19d69fb3-053e-4087-b878-af2c6010741c" containerID="274e1703a78f52e87348112a0220b063305db0c5777f933d03cc40a8956a9700" exitCode=0 Jan 04 12:56:18 crc kubenswrapper[4797]: I0104 12:56:18.987574 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerDied","Data":"274e1703a78f52e87348112a0220b063305db0c5777f933d03cc40a8956a9700"} Jan 04 12:56:18 crc kubenswrapper[4797]: I0104 12:56:18.987634 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerStarted","Data":"1ec52142de168bb13de54ff10bda59cd4d72dcb58ddb527a5b9b5dd96c6c15f9"} Jan 04 12:56:18 crc kubenswrapper[4797]: I0104 12:56:18.989237 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:56:19 crc kubenswrapper[4797]: I0104 12:56:19.996059 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerStarted","Data":"e54a22e3974cdc1502504dea898d5bb21681832250c1fe07330a4f798bdde4c2"} Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.517724 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"] Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.520889 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.539149 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"] Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.647037 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk44k\" (UniqueName: \"kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.647120 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.647249 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.749179 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk44k\" (UniqueName: \"kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.749254 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.749317 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.749854 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:20 crc kubenswrapper[4797]: I0104 12:56:20.750092 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:21 crc kubenswrapper[4797]: I0104 12:56:21.004442 4797 generic.go:334] "Generic 
(PLEG): container finished" podID="19d69fb3-053e-4087-b878-af2c6010741c" containerID="e54a22e3974cdc1502504dea898d5bb21681832250c1fe07330a4f798bdde4c2" exitCode=0 Jan 04 12:56:21 crc kubenswrapper[4797]: I0104 12:56:21.004483 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerDied","Data":"e54a22e3974cdc1502504dea898d5bb21681832250c1fe07330a4f798bdde4c2"} Jan 04 12:56:21 crc kubenswrapper[4797]: I0104 12:56:21.089147 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk44k\" (UniqueName: \"kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k\") pod \"certified-operators-h7hsr\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") " pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:21 crc kubenswrapper[4797]: I0104 12:56:21.170101 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:21 crc kubenswrapper[4797]: I0104 12:56:21.648415 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"] Jan 04 12:56:22 crc kubenswrapper[4797]: I0104 12:56:22.012120 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerStarted","Data":"8b3e9c41c175e332eddb00016e54355e8131b260eb672848a0ea1087d4e385ea"} Jan 04 12:56:22 crc kubenswrapper[4797]: I0104 12:56:22.013950 4797 generic.go:334] "Generic (PLEG): container finished" podID="2c5f2aed-9565-489b-8a81-a598860db797" containerID="587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513" exitCode=0 Jan 04 12:56:22 crc kubenswrapper[4797]: I0104 12:56:22.014026 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerDied","Data":"587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513"} Jan 04 12:56:22 crc kubenswrapper[4797]: I0104 12:56:22.014053 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerStarted","Data":"a05b54f4a087d519f3dcbfe3b8dfe688a73427d1c2d875eedac4d9e0de6c809a"} Jan 04 12:56:22 crc kubenswrapper[4797]: I0104 12:56:22.044689 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zq4r2" podStartSLOduration=2.540752569 podStartE2EDuration="5.044627147s" podCreationTimestamp="2026-01-04 12:56:17 +0000 UTC" firstStartedPulling="2026-01-04 12:56:18.988979171 +0000 UTC m=+4077.846165880" lastFinishedPulling="2026-01-04 12:56:21.492853749 +0000 UTC m=+4080.350040458" observedRunningTime="2026-01-04 12:56:22.037451521 +0000 UTC m=+4080.894638260" watchObservedRunningTime="2026-01-04 12:56:22.044627147 +0000 UTC m=+4080.901813856" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.021952 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerStarted","Data":"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"} Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.101499 4797 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-c67tg"] Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.103521 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.105174 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29xmj\" (UniqueName: \"kubernetes.io/projected/cca05662-1a7d-4f86-8814-2e4f766ab38f-kube-api-access-29xmj\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.105371 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.105461 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.119676 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c67tg"] Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.207299 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.207349 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.207407 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29xmj\" (UniqueName: \"kubernetes.io/projected/cca05662-1a7d-4f86-8814-2e4f766ab38f-kube-api-access-29xmj\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.208062 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.208091 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") 
" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.230428 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29xmj\" (UniqueName: \"kubernetes.io/projected/cca05662-1a7d-4f86-8814-2e4f766ab38f-kube-api-access-29xmj\") pod \"redhat-marketplace-c67tg\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") " pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: E0104 12:56:23.339237 4797 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c5f2aed_9565_489b_8a81_a598860db797.slice/crio-conmon-7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c5f2aed_9565_489b_8a81_a598860db797.slice/crio-7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75.scope\": RecentStats: unable to find data in memory cache]" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.434375 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:23 crc kubenswrapper[4797]: I0104 12:56:23.726529 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c67tg"] Jan 04 12:56:24 crc kubenswrapper[4797]: I0104 12:56:24.031752 4797 generic.go:334] "Generic (PLEG): container finished" podID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerID="d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46" exitCode=0 Jan 04 12:56:24 crc kubenswrapper[4797]: I0104 12:56:24.031937 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerDied","Data":"d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46"} Jan 04 12:56:24 crc kubenswrapper[4797]: I0104 12:56:24.032234 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerStarted","Data":"3f1eaa5c6ea9e342747b41ca072a8a8a8b45cc8bc2bec46700523331952b8dea"} Jan 04 12:56:24 crc kubenswrapper[4797]: I0104 12:56:24.035497 4797 generic.go:334] "Generic (PLEG): container finished" podID="2c5f2aed-9565-489b-8a81-a598860db797" containerID="7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75" exitCode=0 Jan 04 12:56:24 crc kubenswrapper[4797]: I0104 12:56:24.035552 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerDied","Data":"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"} Jan 04 12:56:25 crc kubenswrapper[4797]: I0104 12:56:25.044437 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerStarted","Data":"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492"} Jan 04 12:56:25 crc kubenswrapper[4797]: I0104 12:56:25.047160 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" 
event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerStarted","Data":"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"} Jan 04 12:56:25 crc kubenswrapper[4797]: I0104 12:56:25.083127 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h7hsr" podStartSLOduration=2.651157032 podStartE2EDuration="5.083110228s" podCreationTimestamp="2026-01-04 12:56:20 +0000 UTC" firstStartedPulling="2026-01-04 12:56:22.015293964 +0000 UTC m=+4080.872480673" lastFinishedPulling="2026-01-04 12:56:24.44724711 +0000 UTC m=+4083.304433869" observedRunningTime="2026-01-04 12:56:25.081539747 +0000 UTC m=+4083.938726456" watchObservedRunningTime="2026-01-04 12:56:25.083110228 +0000 UTC m=+4083.940296927" Jan 04 12:56:26 crc kubenswrapper[4797]: I0104 12:56:26.057809 4797 generic.go:334] "Generic (PLEG): container finished" podID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerID="5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492" exitCode=0 Jan 04 12:56:26 crc kubenswrapper[4797]: I0104 12:56:26.058669 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerDied","Data":"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492"} Jan 04 12:56:27 crc kubenswrapper[4797]: I0104 12:56:27.887970 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:27 crc kubenswrapper[4797]: I0104 12:56:27.888340 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:27 crc kubenswrapper[4797]: I0104 12:56:27.940692 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:28 crc kubenswrapper[4797]: I0104 12:56:28.077544 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerStarted","Data":"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132"} Jan 04 12:56:28 crc kubenswrapper[4797]: I0104 12:56:28.103844 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c67tg" podStartSLOduration=2.255439582 podStartE2EDuration="5.103825045s" podCreationTimestamp="2026-01-04 12:56:23 +0000 UTC" firstStartedPulling="2026-01-04 12:56:24.034397847 +0000 UTC m=+4082.891584596" lastFinishedPulling="2026-01-04 12:56:26.88278335 +0000 UTC m=+4085.739970059" observedRunningTime="2026-01-04 12:56:28.100072528 +0000 UTC m=+4086.957259247" watchObservedRunningTime="2026-01-04 12:56:28.103825045 +0000 UTC m=+4086.961011764" Jan 04 12:56:28 crc kubenswrapper[4797]: I0104 12:56:28.154492 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:29 crc kubenswrapper[4797]: I0104 12:56:29.896782 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:30 crc kubenswrapper[4797]: I0104 12:56:30.096405 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zq4r2" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="registry-server" 
containerID="cri-o://8b3e9c41c175e332eddb00016e54355e8131b260eb672848a0ea1087d4e385ea" gracePeriod=2 Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.106941 4797 generic.go:334] "Generic (PLEG): container finished" podID="19d69fb3-053e-4087-b878-af2c6010741c" containerID="8b3e9c41c175e332eddb00016e54355e8131b260eb672848a0ea1087d4e385ea" exitCode=0 Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.107051 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerDied","Data":"8b3e9c41c175e332eddb00016e54355e8131b260eb672848a0ea1087d4e385ea"} Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.170847 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.170952 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.247241 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.262031 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.437771 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd7gz\" (UniqueName: \"kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz\") pod \"19d69fb3-053e-4087-b878-af2c6010741c\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.437901 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities\") pod \"19d69fb3-053e-4087-b878-af2c6010741c\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.437946 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content\") pod \"19d69fb3-053e-4087-b878-af2c6010741c\" (UID: \"19d69fb3-053e-4087-b878-af2c6010741c\") " Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.439119 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities" (OuterVolumeSpecName: "utilities") pod "19d69fb3-053e-4087-b878-af2c6010741c" (UID: "19d69fb3-053e-4087-b878-af2c6010741c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.446599 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz" (OuterVolumeSpecName: "kube-api-access-fd7gz") pod "19d69fb3-053e-4087-b878-af2c6010741c" (UID: "19d69fb3-053e-4087-b878-af2c6010741c"). InnerVolumeSpecName "kube-api-access-fd7gz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.540246 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd7gz\" (UniqueName: \"kubernetes.io/projected/19d69fb3-053e-4087-b878-af2c6010741c-kube-api-access-fd7gz\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:31 crc kubenswrapper[4797]: I0104 12:56:31.540646 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.115590 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zq4r2" event={"ID":"19d69fb3-053e-4087-b878-af2c6010741c","Type":"ContainerDied","Data":"1ec52142de168bb13de54ff10bda59cd4d72dcb58ddb527a5b9b5dd96c6c15f9"} Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.115661 4797 scope.go:117] "RemoveContainer" containerID="8b3e9c41c175e332eddb00016e54355e8131b260eb672848a0ea1087d4e385ea" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.115661 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zq4r2" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.124101 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19d69fb3-053e-4087-b878-af2c6010741c" (UID: "19d69fb3-053e-4087-b878-af2c6010741c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.141213 4797 scope.go:117] "RemoveContainer" containerID="e54a22e3974cdc1502504dea898d5bb21681832250c1fe07330a4f798bdde4c2" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.149704 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d69fb3-053e-4087-b878-af2c6010741c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.161817 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h7hsr" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.166747 4797 scope.go:117] "RemoveContainer" containerID="274e1703a78f52e87348112a0220b063305db0c5777f933d03cc40a8956a9700" Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.470782 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:32 crc kubenswrapper[4797]: I0104 12:56:32.482480 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zq4r2"] Jan 04 12:56:33 crc kubenswrapper[4797]: I0104 12:56:33.462106 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:33 crc kubenswrapper[4797]: I0104 12:56:33.465663 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:33 crc kubenswrapper[4797]: I0104 12:56:33.505031 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19d69fb3-053e-4087-b878-af2c6010741c" path="/var/lib/kubelet/pods/19d69fb3-053e-4087-b878-af2c6010741c/volumes" Jan 04 12:56:33 crc 
Jan 04 12:56:33 crc kubenswrapper[4797]: I0104 12:56:33.506516 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"]
Jan 04 12:56:34 crc kubenswrapper[4797]: I0104 12:56:34.313224 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c67tg"
Jan 04 12:56:34 crc kubenswrapper[4797]: I0104 12:56:34.316966 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h7hsr" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="registry-server" containerID="cri-o://47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c" gracePeriod=2
Jan 04 12:56:34 crc kubenswrapper[4797]: I0104 12:56:34.395761 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c67tg"
Jan 04 12:56:35 crc kubenswrapper[4797]: I0104 12:56:35.897692 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c67tg"]
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.283169 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h7hsr"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.355450 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content\") pod \"2c5f2aed-9565-489b-8a81-a598860db797\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") "
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.355779 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities\") pod \"2c5f2aed-9565-489b-8a81-a598860db797\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") "
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.357172 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities" (OuterVolumeSpecName: "utilities") pod "2c5f2aed-9565-489b-8a81-a598860db797" (UID: "2c5f2aed-9565-489b-8a81-a598860db797"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.366549 4797 generic.go:334] "Generic (PLEG): container finished" podID="2c5f2aed-9565-489b-8a81-a598860db797" containerID="47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c" exitCode=0
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.367539 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h7hsr"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.367969 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerDied","Data":"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"}
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.368024 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7hsr" event={"ID":"2c5f2aed-9565-489b-8a81-a598860db797","Type":"ContainerDied","Data":"a05b54f4a087d519f3dcbfe3b8dfe688a73427d1c2d875eedac4d9e0de6c809a"}
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.368045 4797 scope.go:117] "RemoveContainer" containerID="47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.388488 4797 scope.go:117] "RemoveContainer" containerID="7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.413529 4797 scope.go:117] "RemoveContainer" containerID="587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.430546 4797 scope.go:117] "RemoveContainer" containerID="47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"
Jan 04 12:56:36 crc kubenswrapper[4797]: E0104 12:56:36.431074 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c\": container with ID starting with 47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c not found: ID does not exist" containerID="47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.431126 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c"} err="failed to get container status \"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c\": rpc error: code = NotFound desc = could not find container \"47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c\": container with ID starting with 47d61148a2003188b343bf43dd93a6a04695867da91adfaa3782bdd1a26b4d0c not found: ID does not exist"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.431158 4797 scope.go:117] "RemoveContainer" containerID="7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"
Jan 04 12:56:36 crc kubenswrapper[4797]: E0104 12:56:36.431722 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75\": container with ID starting with 7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75 not found: ID does not exist" containerID="7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.431773 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75"} err="failed to get container status \"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75\": rpc error: code = NotFound desc = could not find container \"7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75\": container with ID starting with 7ef58e48674cc0e21b2a07d781051d48498af065685e2ba8e1595b71e08ada75 not found: ID does not exist"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.431807 4797 scope.go:117] "RemoveContainer" containerID="587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513"
Jan 04 12:56:36 crc kubenswrapper[4797]: E0104 12:56:36.432183 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513\": container with ID starting with 587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513 not found: ID does not exist" containerID="587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.432224 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513"} err="failed to get container status \"587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513\": rpc error: code = NotFound desc = could not find container \"587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513\": container with ID starting with 587dc54d95588e77c98784833543e68c85b6d65c796d9723cd03478b8202b513 not found: ID does not exist"
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.433658 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c5f2aed-9565-489b-8a81-a598860db797" (UID: "2c5f2aed-9565-489b-8a81-a598860db797"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.456779 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk44k\" (UniqueName: \"kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k\") pod \"2c5f2aed-9565-489b-8a81-a598860db797\" (UID: \"2c5f2aed-9565-489b-8a81-a598860db797\") "
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.459078 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.459156 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5f2aed-9565-489b-8a81-a598860db797-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.462570 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k" (OuterVolumeSpecName: "kube-api-access-lk44k") pod "2c5f2aed-9565-489b-8a81-a598860db797" (UID: "2c5f2aed-9565-489b-8a81-a598860db797"). InnerVolumeSpecName "kube-api-access-lk44k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.560928 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk44k\" (UniqueName: \"kubernetes.io/projected/2c5f2aed-9565-489b-8a81-a598860db797-kube-api-access-lk44k\") on node \"crc\" DevicePath \"\""
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.719917 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"]
Jan 04 12:56:36 crc kubenswrapper[4797]: I0104 12:56:36.731545 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h7hsr"]
Jan 04 12:56:37 crc kubenswrapper[4797]: I0104 12:56:37.380964 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c67tg" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="registry-server" containerID="cri-o://ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132" gracePeriod=2
Jan 04 12:56:37 crc kubenswrapper[4797]: I0104 12:56:37.509283 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c5f2aed-9565-489b-8a81-a598860db797" path="/var/lib/kubelet/pods/2c5f2aed-9565-489b-8a81-a598860db797/volumes"
Jan 04 12:56:37 crc kubenswrapper[4797]: I0104 12:56:37.895717 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c67tg"
Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.084308 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content\") pod \"cca05662-1a7d-4f86-8814-2e4f766ab38f\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") "
Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.084567 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29xmj\" (UniqueName: \"kubernetes.io/projected/cca05662-1a7d-4f86-8814-2e4f766ab38f-kube-api-access-29xmj\") pod \"cca05662-1a7d-4f86-8814-2e4f766ab38f\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") "
Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.084699 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities\") pod \"cca05662-1a7d-4f86-8814-2e4f766ab38f\" (UID: \"cca05662-1a7d-4f86-8814-2e4f766ab38f\") "
Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.086749 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities" (OuterVolumeSpecName: "utilities") pod "cca05662-1a7d-4f86-8814-2e4f766ab38f" (UID: "cca05662-1a7d-4f86-8814-2e4f766ab38f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.108337 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cca05662-1a7d-4f86-8814-2e4f766ab38f" (UID: "cca05662-1a7d-4f86-8814-2e4f766ab38f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.186610 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.186667 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cca05662-1a7d-4f86-8814-2e4f766ab38f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.186691 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29xmj\" (UniqueName: \"kubernetes.io/projected/cca05662-1a7d-4f86-8814-2e4f766ab38f-kube-api-access-29xmj\") on node \"crc\" DevicePath \"\"" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.391984 4797 generic.go:334] "Generic (PLEG): container finished" podID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerID="ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132" exitCode=0 Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.392086 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerDied","Data":"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132"} Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.392101 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c67tg" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.392140 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c67tg" event={"ID":"cca05662-1a7d-4f86-8814-2e4f766ab38f","Type":"ContainerDied","Data":"3f1eaa5c6ea9e342747b41ca072a8a8a8b45cc8bc2bec46700523331952b8dea"} Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.392170 4797 scope.go:117] "RemoveContainer" containerID="ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.423100 4797 scope.go:117] "RemoveContainer" containerID="5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.452066 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c67tg"] Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.462736 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c67tg"] Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.463253 4797 scope.go:117] "RemoveContainer" containerID="d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.497772 4797 scope.go:117] "RemoveContainer" containerID="ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132" Jan 04 12:56:38 crc kubenswrapper[4797]: E0104 12:56:38.498430 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132\": container with ID starting with ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132 not found: ID does not exist" containerID="ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.498491 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132"} err="failed to get container status \"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132\": rpc error: code = NotFound desc = could not find container \"ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132\": container with ID starting with ff6ffe6fb9cb20d4e32b9bb0b48822b7f245724350de23277f5a2d0768fbf132 not found: ID does not exist" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.498526 4797 scope.go:117] "RemoveContainer" containerID="5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492" Jan 04 12:56:38 crc kubenswrapper[4797]: E0104 12:56:38.499023 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492\": container with ID starting with 5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492 not found: ID does not exist" containerID="5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.499101 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492"} err="failed to get container status \"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492\": rpc error: code = NotFound desc = could not find 
container \"5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492\": container with ID starting with 5fd77d7efb071a37dec863d30b4e3a4e02d3e17ee6c404c42cc2a35d057b1492 not found: ID does not exist" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.499154 4797 scope.go:117] "RemoveContainer" containerID="d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46" Jan 04 12:56:38 crc kubenswrapper[4797]: E0104 12:56:38.499708 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46\": container with ID starting with d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46 not found: ID does not exist" containerID="d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46" Jan 04 12:56:38 crc kubenswrapper[4797]: I0104 12:56:38.499747 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46"} err="failed to get container status \"d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46\": rpc error: code = NotFound desc = could not find container \"d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46\": container with ID starting with d8c14f1f8c899eefddb79b5ac17df0c52e9c355e5882f9a9fcdf679341b2eb46 not found: ID does not exist" Jan 04 12:56:39 crc kubenswrapper[4797]: I0104 12:56:39.487630 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" path="/var/lib/kubelet/pods/cca05662-1a7d-4f86-8814-2e4f766ab38f/volumes" Jan 04 12:57:19 crc kubenswrapper[4797]: I0104 12:57:19.493051 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:57:19 crc kubenswrapper[4797]: I0104 12:57:19.493781 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:57:49 crc kubenswrapper[4797]: I0104 12:57:49.493873 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:57:49 crc kubenswrapper[4797]: I0104 12:57:49.495219 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:58:19 crc kubenswrapper[4797]: I0104 12:58:19.493290 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 
12:58:19 crc kubenswrapper[4797]: I0104 12:58:19.493749 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:58:19 crc kubenswrapper[4797]: I0104 12:58:19.493789 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 12:58:19 crc kubenswrapper[4797]: I0104 12:58:19.494307 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:58:19 crc kubenswrapper[4797]: I0104 12:58:19.494387 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca" gracePeriod=600 Jan 04 12:58:20 crc kubenswrapper[4797]: I0104 12:58:20.348682 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca" exitCode=0 Jan 04 12:58:20 crc kubenswrapper[4797]: I0104 12:58:20.348764 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca"} Jan 04 12:58:20 crc kubenswrapper[4797]: I0104 12:58:20.349153 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a"} Jan 04 12:58:20 crc kubenswrapper[4797]: I0104 12:58:20.349187 4797 scope.go:117] "RemoveContainer" containerID="05c23fc0579e924d0ffe31885168b134194c60e1576f709a1c4fbfec0a9ebdc8" Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.172892 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"] Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173793 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="extract-content" Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173810 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="extract-content" Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173833 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="registry-server" Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173841 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="registry-server" Jan 04 13:00:00 crc kubenswrapper[4797]: 
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173854 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173863 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173876 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173883 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173894 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173902 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173921 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173928 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173937 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="extract-content"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173944 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="extract-content"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173956 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="extract-content"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173963 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="extract-content"
Jan 04 13:00:00 crc kubenswrapper[4797]: E0104 13:00:00.173977 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.173984 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="extract-utilities"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.174135 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c5f2aed-9565-489b-8a81-a598860db797" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.174151 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="19d69fb3-053e-4087-b878-af2c6010741c" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.174164 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="cca05662-1a7d-4f86-8814-2e4f766ab38f" containerName="registry-server"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.174636 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.177481 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.177724 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.212777 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.212833 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.213059 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz4jc\" (UniqueName: \"kubernetes.io/projected/d8d7e79e-019a-45e2-af81-49e0d16adb2c-kube-api-access-lz4jc\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.219975 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"]
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.314618 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.314667 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.314739 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz4jc\" (UniqueName: \"kubernetes.io/projected/d8d7e79e-019a-45e2-af81-49e0d16adb2c-kube-api-access-lz4jc\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.316008 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.324887 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.347422 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz4jc\" (UniqueName: \"kubernetes.io/projected/d8d7e79e-019a-45e2-af81-49e0d16adb2c-kube-api-access-lz4jc\") pod \"collect-profiles-29458860-sbv45\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.491666 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:00 crc kubenswrapper[4797]: I0104 13:00:00.728215 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"]
Jan 04 13:00:01 crc kubenswrapper[4797]: I0104 13:00:01.224014 4797 generic.go:334] "Generic (PLEG): container finished" podID="d8d7e79e-019a-45e2-af81-49e0d16adb2c" containerID="c1ef8b4b2a63f37b1a056ccea521afba4a2c6f2ae1c810f09d1866544bf17e47" exitCode=0
Jan 04 13:00:01 crc kubenswrapper[4797]: I0104 13:00:01.224094 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45" event={"ID":"d8d7e79e-019a-45e2-af81-49e0d16adb2c","Type":"ContainerDied","Data":"c1ef8b4b2a63f37b1a056ccea521afba4a2c6f2ae1c810f09d1866544bf17e47"}
Jan 04 13:00:01 crc kubenswrapper[4797]: I0104 13:00:01.224380 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45" event={"ID":"d8d7e79e-019a-45e2-af81-49e0d16adb2c","Type":"ContainerStarted","Data":"5e25d737a72661f390f95c662e57d497e6e9411fb90b96f3a1f1044f1d27510e"}
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.630402 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45"
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.763104 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz4jc\" (UniqueName: \"kubernetes.io/projected/d8d7e79e-019a-45e2-af81-49e0d16adb2c-kube-api-access-lz4jc\") pod \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") "
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.763232 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume\") pod \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") "
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.763296 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume\") pod \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\" (UID: \"d8d7e79e-019a-45e2-af81-49e0d16adb2c\") "
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.763969 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume" (OuterVolumeSpecName: "config-volume") pod "d8d7e79e-019a-45e2-af81-49e0d16adb2c" (UID: "d8d7e79e-019a-45e2-af81-49e0d16adb2c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.768199 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d8d7e79e-019a-45e2-af81-49e0d16adb2c" (UID: "d8d7e79e-019a-45e2-af81-49e0d16adb2c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.865682 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz4jc\" (UniqueName: \"kubernetes.io/projected/d8d7e79e-019a-45e2-af81-49e0d16adb2c-kube-api-access-lz4jc\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.865941 4797 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8d7e79e-019a-45e2-af81-49e0d16adb2c-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:02 crc kubenswrapper[4797]: I0104 13:00:02.865955 4797 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8d7e79e-019a-45e2-af81-49e0d16adb2c-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:03 crc kubenswrapper[4797]: I0104 13:00:03.239593 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45" event={"ID":"d8d7e79e-019a-45e2-af81-49e0d16adb2c","Type":"ContainerDied","Data":"5e25d737a72661f390f95c662e57d497e6e9411fb90b96f3a1f1044f1d27510e"} Jan 04 13:00:03 crc kubenswrapper[4797]: I0104 13:00:03.239910 4797 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e25d737a72661f390f95c662e57d497e6e9411fb90b96f3a1f1044f1d27510e" Jan 04 13:00:03 crc kubenswrapper[4797]: I0104 13:00:03.239687 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-sbv45" Jan 04 13:00:03 crc kubenswrapper[4797]: I0104 13:00:03.737451 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"] Jan 04 13:00:03 crc kubenswrapper[4797]: I0104 13:00:03.743802 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-nqzcw"] Jan 04 13:00:05 crc kubenswrapper[4797]: I0104 13:00:05.491212 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="392d2062-b718-4eaf-a87d-9d990d0dfc0e" path="/var/lib/kubelet/pods/392d2062-b718-4eaf-a87d-9d990d0dfc0e/volumes" Jan 04 13:00:19 crc kubenswrapper[4797]: I0104 13:00:19.494149 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:00:19 crc kubenswrapper[4797]: I0104 13:00:19.494899 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:00:27 crc kubenswrapper[4797]: I0104 13:00:27.207715 4797 scope.go:117] "RemoveContainer" containerID="d54a12406a5fa13f931a1ad03f4b0c3e8ec9df1c6d8b5aa76c97d4da8e059ca4" Jan 04 13:00:49 crc kubenswrapper[4797]: I0104 13:00:49.492684 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Jan 04 13:00:49 crc kubenswrapper[4797]: I0104 13:00:49.493420 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.031922 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:13 crc kubenswrapper[4797]: E0104 13:01:13.033150 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d7e79e-019a-45e2-af81-49e0d16adb2c" containerName="collect-profiles" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.033172 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d7e79e-019a-45e2-af81-49e0d16adb2c" containerName="collect-profiles" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.033421 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8d7e79e-019a-45e2-af81-49e0d16adb2c" containerName="collect-profiles" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.035180 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.060936 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.061151 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.061223 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.061375 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k942\" (UniqueName: \"kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.162612 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.162685 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " 
pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.162738 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k942\" (UniqueName: \"kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.163644 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.163922 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.195572 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k942\" (UniqueName: \"kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942\") pod \"community-operators-v5pw7\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.386528 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:13 crc kubenswrapper[4797]: I0104 13:01:13.959615 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:13 crc kubenswrapper[4797]: W0104 13:01:13.961655 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc047b8c7_098c_4173_9ef6_0a361f5d3668.slice/crio-11cbddc4821536077ef15ecdfde4ef7e100660477fc874ce8a145d0d5220f877 WatchSource:0}: Error finding container 11cbddc4821536077ef15ecdfde4ef7e100660477fc874ce8a145d0d5220f877: Status 404 returned error can't find the container with id 11cbddc4821536077ef15ecdfde4ef7e100660477fc874ce8a145d0d5220f877 Jan 04 13:01:14 crc kubenswrapper[4797]: I0104 13:01:14.847314 4797 generic.go:334] "Generic (PLEG): container finished" podID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerID="d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677" exitCode=0 Jan 04 13:01:14 crc kubenswrapper[4797]: I0104 13:01:14.847383 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerDied","Data":"d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677"} Jan 04 13:01:14 crc kubenswrapper[4797]: I0104 13:01:14.847846 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerStarted","Data":"11cbddc4821536077ef15ecdfde4ef7e100660477fc874ce8a145d0d5220f877"} Jan 04 13:01:15 crc kubenswrapper[4797]: I0104 13:01:15.858611 4797 generic.go:334] "Generic (PLEG): container 
finished" podID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerID="a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e" exitCode=0 Jan 04 13:01:15 crc kubenswrapper[4797]: I0104 13:01:15.858691 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerDied","Data":"a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e"} Jan 04 13:01:16 crc kubenswrapper[4797]: I0104 13:01:16.872277 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerStarted","Data":"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622"} Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.492780 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.492884 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.492936 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.494019 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.494134 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" gracePeriod=600 Jan 04 13:01:19 crc kubenswrapper[4797]: E0104 13:01:19.612805 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.896230 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" exitCode=0 Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.896278 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" 
event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a"} Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.896318 4797 scope.go:117] "RemoveContainer" containerID="5b14750b596b69db9ddf6bd61a4411f1d78fbba3802d408fd4d4058220b274ca" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.896773 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:01:19 crc kubenswrapper[4797]: E0104 13:01:19.897015 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:01:19 crc kubenswrapper[4797]: I0104 13:01:19.914456 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v5pw7" podStartSLOduration=6.438985145 podStartE2EDuration="7.914405338s" podCreationTimestamp="2026-01-04 13:01:12 +0000 UTC" firstStartedPulling="2026-01-04 13:01:14.84967623 +0000 UTC m=+4373.706862979" lastFinishedPulling="2026-01-04 13:01:16.325096423 +0000 UTC m=+4375.182283172" observedRunningTime="2026-01-04 13:01:16.894820683 +0000 UTC m=+4375.752007462" watchObservedRunningTime="2026-01-04 13:01:19.914405338 +0000 UTC m=+4378.771592087" Jan 04 13:01:23 crc kubenswrapper[4797]: I0104 13:01:23.387461 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:23 crc kubenswrapper[4797]: I0104 13:01:23.387749 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:23 crc kubenswrapper[4797]: I0104 13:01:23.437581 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:24 crc kubenswrapper[4797]: I0104 13:01:24.032797 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:24 crc kubenswrapper[4797]: I0104 13:01:24.101116 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:25 crc kubenswrapper[4797]: I0104 13:01:25.949961 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v5pw7" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="registry-server" containerID="cri-o://541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622" gracePeriod=2 Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.887300 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.962034 4797 generic.go:334] "Generic (PLEG): container finished" podID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerID="541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622" exitCode=0 Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.962098 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerDied","Data":"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622"} Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.962168 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5pw7" event={"ID":"c047b8c7-098c-4173-9ef6-0a361f5d3668","Type":"ContainerDied","Data":"11cbddc4821536077ef15ecdfde4ef7e100660477fc874ce8a145d0d5220f877"} Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.962126 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5pw7" Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.962200 4797 scope.go:117] "RemoveContainer" containerID="541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622" Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.980984 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content\") pod \"c047b8c7-098c-4173-9ef6-0a361f5d3668\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.981105 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities\") pod \"c047b8c7-098c-4173-9ef6-0a361f5d3668\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.981226 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k942\" (UniqueName: \"kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942\") pod \"c047b8c7-098c-4173-9ef6-0a361f5d3668\" (UID: \"c047b8c7-098c-4173-9ef6-0a361f5d3668\") " Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.983675 4797 scope.go:117] "RemoveContainer" containerID="a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e" Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.984981 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities" (OuterVolumeSpecName: "utilities") pod "c047b8c7-098c-4173-9ef6-0a361f5d3668" (UID: "c047b8c7-098c-4173-9ef6-0a361f5d3668"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:01:26 crc kubenswrapper[4797]: I0104 13:01:26.990662 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942" (OuterVolumeSpecName: "kube-api-access-8k942") pod "c047b8c7-098c-4173-9ef6-0a361f5d3668" (UID: "c047b8c7-098c-4173-9ef6-0a361f5d3668"). InnerVolumeSpecName "kube-api-access-8k942". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.040426 4797 scope.go:117] "RemoveContainer" containerID="d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.067547 4797 scope.go:117] "RemoveContainer" containerID="541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622" Jan 04 13:01:27 crc kubenswrapper[4797]: E0104 13:01:27.068372 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622\": container with ID starting with 541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622 not found: ID does not exist" containerID="541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.068432 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622"} err="failed to get container status \"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622\": rpc error: code = NotFound desc = could not find container \"541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622\": container with ID starting with 541bb751125cd16ccec2d28b527e4b873c929b513614b53e3de6a7bce6ddd622 not found: ID does not exist" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.068469 4797 scope.go:117] "RemoveContainer" containerID="a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e" Jan 04 13:01:27 crc kubenswrapper[4797]: E0104 13:01:27.068921 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e\": container with ID starting with a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e not found: ID does not exist" containerID="a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.069146 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e"} err="failed to get container status \"a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e\": rpc error: code = NotFound desc = could not find container \"a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e\": container with ID starting with a930770c8627355f502df2ea883a0e0067e0b4d9d5b2b939fd72621c51ed033e not found: ID does not exist" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.069328 4797 scope.go:117] "RemoveContainer" containerID="d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677" Jan 04 13:01:27 crc kubenswrapper[4797]: E0104 13:01:27.070305 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677\": container with ID starting with d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677 not found: ID does not exist" containerID="d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.070346 4797 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677"} err="failed to get container status \"d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677\": rpc error: code = NotFound desc = could not find container \"d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677\": container with ID starting with d3ba01841ff75ec0e65aea441afd59ad09060583a30ee0c9377112ed475ed677 not found: ID does not exist" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.082911 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k942\" (UniqueName: \"kubernetes.io/projected/c047b8c7-098c-4173-9ef6-0a361f5d3668-kube-api-access-8k942\") on node \"crc\" DevicePath \"\"" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.082978 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.089319 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c047b8c7-098c-4173-9ef6-0a361f5d3668" (UID: "c047b8c7-098c-4173-9ef6-0a361f5d3668"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.184305 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c047b8c7-098c-4173-9ef6-0a361f5d3668-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.320239 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.330625 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v5pw7"] Jan 04 13:01:27 crc kubenswrapper[4797]: I0104 13:01:27.489714 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" path="/var/lib/kubelet/pods/c047b8c7-098c-4173-9ef6-0a361f5d3668/volumes" Jan 04 13:01:31 crc kubenswrapper[4797]: I0104 13:01:31.483962 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:01:31 crc kubenswrapper[4797]: E0104 13:01:31.484646 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:01:44 crc kubenswrapper[4797]: I0104 13:01:44.474103 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:01:44 crc kubenswrapper[4797]: E0104 13:01:44.475404 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:01:56 crc kubenswrapper[4797]: I0104 13:01:56.474357 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:01:56 crc kubenswrapper[4797]: E0104 13:01:56.475329 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:02:08 crc kubenswrapper[4797]: I0104 13:02:08.474477 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:02:08 crc kubenswrapper[4797]: E0104 13:02:08.475590 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:02:22 crc kubenswrapper[4797]: I0104 13:02:22.474382 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:02:22 crc kubenswrapper[4797]: E0104 13:02:22.475522 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:02:37 crc kubenswrapper[4797]: I0104 13:02:37.475208 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:02:37 crc kubenswrapper[4797]: E0104 13:02:37.476334 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:02:51 crc kubenswrapper[4797]: I0104 13:02:51.482171 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:02:51 crc kubenswrapper[4797]: E0104 13:02:51.483469 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:03:06 crc kubenswrapper[4797]: I0104 13:03:06.475253 4797 
scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:03:06 crc kubenswrapper[4797]: E0104 13:03:06.476683 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:03:21 crc kubenswrapper[4797]: I0104 13:03:21.484336 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:03:21 crc kubenswrapper[4797]: E0104 13:03:21.485503 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:03:32 crc kubenswrapper[4797]: I0104 13:03:32.473446 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:03:32 crc kubenswrapper[4797]: E0104 13:03:32.474189 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:03:44 crc kubenswrapper[4797]: I0104 13:03:44.474434 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:03:44 crc kubenswrapper[4797]: E0104 13:03:44.475877 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:03:58 crc kubenswrapper[4797]: I0104 13:03:58.473822 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:03:58 crc kubenswrapper[4797]: E0104 13:03:58.474581 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:04:10 crc kubenswrapper[4797]: I0104 13:04:10.474675 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:04:10 crc kubenswrapper[4797]: E0104 13:04:10.475511 4797 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:04:25 crc kubenswrapper[4797]: I0104 13:04:25.474111 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:04:25 crc kubenswrapper[4797]: E0104 13:04:25.475086 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:04:40 crc kubenswrapper[4797]: I0104 13:04:40.474291 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:04:40 crc kubenswrapper[4797]: E0104 13:04:40.476933 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:04:55 crc kubenswrapper[4797]: I0104 13:04:55.474932 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:04:55 crc kubenswrapper[4797]: E0104 13:04:55.476129 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:05:08 crc kubenswrapper[4797]: I0104 13:05:08.474276 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:05:08 crc kubenswrapper[4797]: E0104 13:05:08.475346 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:05:19 crc kubenswrapper[4797]: I0104 13:05:19.475139 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:05:19 crc kubenswrapper[4797]: E0104 13:05:19.476139 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:05:31 crc kubenswrapper[4797]: I0104 13:05:31.479529 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:05:31 crc kubenswrapper[4797]: E0104 13:05:31.480199 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.523274 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s567t/must-gather-t9fl9"] Jan 04 13:05:37 crc kubenswrapper[4797]: E0104 13:05:37.524406 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="registry-server" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.524423 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="registry-server" Jan 04 13:05:37 crc kubenswrapper[4797]: E0104 13:05:37.524460 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="extract-content" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.524469 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="extract-content" Jan 04 13:05:37 crc kubenswrapper[4797]: E0104 13:05:37.524516 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="extract-utilities" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.524526 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="extract-utilities" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.524874 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c047b8c7-098c-4173-9ef6-0a361f5d3668" containerName="registry-server" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.526431 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.539613 4797 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-s567t"/"default-dockercfg-9k828" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.539977 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s567t"/"kube-root-ca.crt" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.540030 4797 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s567t"/"openshift-service-ca.crt" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.547142 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s567t/must-gather-t9fl9"] Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.629894 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.630082 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sns5g\" (UniqueName: \"kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.731639 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.731966 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sns5g\" (UniqueName: \"kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.732313 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.762576 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sns5g\" (UniqueName: \"kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g\") pod \"must-gather-t9fl9\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") " pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:37 crc kubenswrapper[4797]: I0104 13:05:37.851417 4797 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s567t/must-gather-t9fl9" Jan 04 13:05:38 crc kubenswrapper[4797]: I0104 13:05:38.304765 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s567t/must-gather-t9fl9"] Jan 04 13:05:38 crc kubenswrapper[4797]: I0104 13:05:38.320609 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 13:05:39 crc kubenswrapper[4797]: I0104 13:05:39.096268 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s567t/must-gather-t9fl9" event={"ID":"d009ae5a-2d1d-4458-b654-bf30d1a32bc1","Type":"ContainerStarted","Data":"694b73d1f6c9fc068b67c6f89cd832e362c9a3f40761c61b68f300af2c763fd1"} Jan 04 13:05:45 crc kubenswrapper[4797]: I0104 13:05:45.475762 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:05:45 crc kubenswrapper[4797]: E0104 13:05:45.476805 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:05:46 crc kubenswrapper[4797]: I0104 13:05:46.166425 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s567t/must-gather-t9fl9" event={"ID":"d009ae5a-2d1d-4458-b654-bf30d1a32bc1","Type":"ContainerStarted","Data":"d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d"} Jan 04 13:05:46 crc kubenswrapper[4797]: I0104 13:05:46.166653 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s567t/must-gather-t9fl9" event={"ID":"d009ae5a-2d1d-4458-b654-bf30d1a32bc1","Type":"ContainerStarted","Data":"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"} Jan 04 13:05:46 crc kubenswrapper[4797]: I0104 13:05:46.183186 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-s567t/must-gather-t9fl9" podStartSLOduration=2.058398331 podStartE2EDuration="9.183159549s" podCreationTimestamp="2026-01-04 13:05:37 +0000 UTC" firstStartedPulling="2026-01-04 13:05:38.320407706 +0000 UTC m=+4637.177594415" lastFinishedPulling="2026-01-04 13:05:45.445168924 +0000 UTC m=+4644.302355633" observedRunningTime="2026-01-04 13:05:46.181017012 +0000 UTC m=+4645.038203721" watchObservedRunningTime="2026-01-04 13:05:46.183159549 +0000 UTC m=+4645.040346258" Jan 04 13:05:57 crc kubenswrapper[4797]: I0104 13:05:57.474260 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:05:57 crc kubenswrapper[4797]: E0104 13:05:57.475143 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:06:09 crc kubenswrapper[4797]: I0104 13:06:09.474088 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:06:09 crc 
kubenswrapper[4797]: E0104 13:06:09.474821 4797 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2dbq6_openshift-machine-config-operator(9f279bbd-812a-4617-b821-852c35954cb6)\"" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" Jan 04 13:06:20 crc kubenswrapper[4797]: I0104 13:06:20.474632 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a" Jan 04 13:06:21 crc kubenswrapper[4797]: I0104 13:06:21.453423 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7"} Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.829773 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"] Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.847026 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.876581 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"] Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.971505 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99pgk\" (UniqueName: \"kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.971711 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:37 crc kubenswrapper[4797]: I0104 13:06:37.971751 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.073104 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.073175 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 
13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.073231 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99pgk\" (UniqueName: \"kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.073710 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.074025 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.092460 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99pgk\" (UniqueName: \"kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk\") pod \"redhat-marketplace-px7tt\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.193345 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.668169 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"] Jan 04 13:06:38 crc kubenswrapper[4797]: I0104 13:06:38.764134 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerStarted","Data":"96a7d070cafe5a812af26a89971a9257df93b0ca74170c728755504db901c572"} Jan 04 13:06:39 crc kubenswrapper[4797]: I0104 13:06:39.775150 4797 generic.go:334] "Generic (PLEG): container finished" podID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerID="6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b" exitCode=0 Jan 04 13:06:39 crc kubenswrapper[4797]: I0104 13:06:39.775266 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerDied","Data":"6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b"} Jan 04 13:06:40 crc kubenswrapper[4797]: I0104 13:06:40.783935 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerStarted","Data":"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"} Jan 04 13:06:41 crc kubenswrapper[4797]: I0104 13:06:41.793393 4797 generic.go:334] "Generic (PLEG): container finished" podID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerID="d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f" exitCode=0 Jan 04 13:06:41 crc kubenswrapper[4797]: I0104 13:06:41.793520 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerDied","Data":"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"} Jan 04 13:06:42 crc kubenswrapper[4797]: I0104 13:06:42.802453 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerStarted","Data":"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3"} Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.193828 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.194412 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.262653 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.285060 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-px7tt" podStartSLOduration=8.720085376 podStartE2EDuration="11.285037881s" podCreationTimestamp="2026-01-04 13:06:37 +0000 UTC" firstStartedPulling="2026-01-04 13:06:39.777258167 +0000 UTC m=+4698.634444876" lastFinishedPulling="2026-01-04 13:06:42.342210672 +0000 UTC m=+4701.199397381" observedRunningTime="2026-01-04 13:06:42.835674404 +0000 UTC m=+4701.692861113" watchObservedRunningTime="2026-01-04 13:06:48.285037881 +0000 UTC m=+4707.142224590" Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.891570 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:48 crc kubenswrapper[4797]: I0104 13:06:48.951082 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"] Jan 04 13:06:49 crc kubenswrapper[4797]: I0104 13:06:49.667940 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f6f74d6db-rdrvx_23c167db-cdda-45e6-a380-d2bcec3278aa/manager/0.log" Jan 04 13:06:49 crc kubenswrapper[4797]: I0104 13:06:49.890515 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-78979fc445-6c2kv_22873120-9025-46b8-9e9e-8cb0764c199e/manager/0.log" Jan 04 13:06:49 crc kubenswrapper[4797]: I0104 13:06:49.923056 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/util/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.096916 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/pull/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.121158 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/util/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.133151 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/pull/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.362103 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/pull/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.367699 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/extract/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.372756 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707ddn9b2f_2b48df1b-61fb-4eb0-99f9-2c159b667a3b/util/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.576276 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-66f8b87655-mq2pt_7dc88e26-12f5-480c-b774-8512e7356ab9/manager/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.652765 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7b549fc966-86pbp_bb443027-9af5-40c8-b7dd-72ed080799be/manager/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.754573 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-658dd65b86-x8kvm_b6950105-3c91-45a1-ad35-9871a20ed456/manager/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.837228 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7f5ddd8d7b-ftvkh_7806cbd3-d72f-4b26-83b9-1dee8d7d5489/manager/0.log" Jan 04 13:06:50 crc kubenswrapper[4797]: I0104 13:06:50.856336 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-px7tt" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="registry-server" containerID="cri-o://470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3" gracePeriod=2 Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.038427 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-f99f54bc8-ncp7k_e0eb23c2-253c-422a-9ad9-736b6a2e7beb/manager/0.log" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.092408 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6d99759cf-ql65x_cd5be9d0-3d4f-46a2-9849-f4aa5f7c73bf/manager/0.log" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.284413 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-568985c78-pktt5_c5ea2cde-563f-4d84-a3cf-8292472baaa1/manager/0.log" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.566581 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px7tt" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.658678 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content\") pod \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.658752 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities\") pod \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.658812 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99pgk\" (UniqueName: \"kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk\") pod \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\" (UID: \"c2b17006-d911-4e3b-bee5-1b4abc20b1c1\") " Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.659637 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities" (OuterVolumeSpecName: "utilities") pod "c2b17006-d911-4e3b-bee5-1b4abc20b1c1" (UID: "c2b17006-d911-4e3b-bee5-1b4abc20b1c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.666232 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk" (OuterVolumeSpecName: "kube-api-access-99pgk") pod "c2b17006-d911-4e3b-bee5-1b4abc20b1c1" (UID: "c2b17006-d911-4e3b-bee5-1b4abc20b1c1"). InnerVolumeSpecName "kube-api-access-99pgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.687339 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2b17006-d911-4e3b-bee5-1b4abc20b1c1" (UID: "c2b17006-d911-4e3b-bee5-1b4abc20b1c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.716446 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-598945d5b8-fnm8x_2d49544b-5665-46d3-8a14-fad6d8ecf7bb/manager/0.log" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.745253 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7b88bfc995-8hq46_f421383f-618c-4c24-80da-28db8ef0723a/manager/0.log" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.760602 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99pgk\" (UniqueName: \"kubernetes.io/projected/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-kube-api-access-99pgk\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.760637 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.760649 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b17006-d911-4e3b-bee5-1b4abc20b1c1-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.865258 4797 generic.go:334] "Generic (PLEG): container finished" podID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerID="470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3" exitCode=0 Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.865305 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerDied","Data":"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3"} Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.865332 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px7tt" event={"ID":"c2b17006-d911-4e3b-bee5-1b4abc20b1c1","Type":"ContainerDied","Data":"96a7d070cafe5a812af26a89971a9257df93b0ca74170c728755504db901c572"} Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.865350 4797 scope.go:117] "RemoveContainer" containerID="470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3" Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.865759 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px7tt"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.889186 4797 scope.go:117] "RemoveContainer" containerID="d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.903841 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"]
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.908420 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-px7tt"]
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.910445 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7cd87b778f-s7lmf_b4d1813a-0643-4fff-9bc6-6f065accb1bc/manager/0.log"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.935158 4797 scope.go:117] "RemoveContainer" containerID="6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.969693 4797 scope.go:117] "RemoveContainer" containerID="470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3"
Jan 04 13:06:51 crc kubenswrapper[4797]: E0104 13:06:51.974805 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3\": container with ID starting with 470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3 not found: ID does not exist" containerID="470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.974858 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3"} err="failed to get container status \"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3\": rpc error: code = NotFound desc = could not find container \"470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3\": container with ID starting with 470f5f8371252b9460f874a4a967199c5a4bc844ddad4d5781dc5b3e0009e8b3 not found: ID does not exist"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.974890 4797 scope.go:117] "RemoveContainer" containerID="d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"
Jan 04 13:06:51 crc kubenswrapper[4797]: E0104 13:06:51.975479 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f\": container with ID starting with d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f not found: ID does not exist" containerID="d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.975507 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f"} err="failed to get container status \"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f\": rpc error: code = NotFound desc = could not find container \"d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f\": container with ID starting with d97e4d26b6cdeb6e139f18010b438de2304dcbadaf8bf7877e507bc6f438787f not found: ID does not exist"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.975522 4797 scope.go:117] "RemoveContainer" containerID="6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b"
Jan 04 13:06:51 crc kubenswrapper[4797]: E0104 13:06:51.976591 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b\": container with ID starting with 6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b not found: ID does not exist" containerID="6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b"
Jan 04 13:06:51 crc kubenswrapper[4797]: I0104 13:06:51.976620 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b"} err="failed to get container status \"6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b\": rpc error: code = NotFound desc = could not find container \"6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b\": container with ID starting with 6abb364cd0c72e195ebf4cec768a84326429defc48bf067dc98ef027ee86e14b not found: ID does not exist"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.045849 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5fbbf8b6cc-sw9g8_81b3cb8a-4d8c-4484-a935-54870fd8631d/manager/0.log"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.119548 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-68c649d9d-7bmtj_81235795-0c7e-40b3-bbe3-691d627dc863/manager/0.log"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.207371 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5c4776bcc5h2b88_158d06c2-999b-4a0e-b214-b56a428deeb8/manager/0.log"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.593294 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6879547b79-tt2dv_c6b7d005-2a92-4f31-8fa0-5cc3d8ab64a5/operator/0.log"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.684207 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zz877_1c7b6fcc-0ea8-47c5-8b9f-cee89e41faaf/registry-server/0.log"
Jan 04 13:06:52 crc kubenswrapper[4797]: I0104 13:06:52.971166 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-bf6d4f946-zk4v4_73d3d93a-3be7-4ade-bfc0-fbc0b26bb8ba/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.049121 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7df7568dd6-97vck_cf7b4084-7aaf-42c6-9cdc-656863de1ed7/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.057632 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-9b6f8f78c-5npjh_fa826769-776f-42e2-ad58-f528ca756f03/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.170183 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-2tb7m_0a0a3dfd-8fb8-4ae1-9994-4dfe24f164eb/operator/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.234494 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bb586bbf4-wlg4f_8a0379ed-3206-48b0-8822-61cac55ba4cb/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.373785 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-68d988df55-bljb9_0ee7d1ba-194c-4603-887a-0472397bda7c/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.402209 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6c866cfdcb-28htf_de2f9a17-64b4-4dc6-ab79-9ddc97e1927f/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.406191 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-9dbdf6486-l6drs_5c97e032-2a27-4cd4-bcd7-70d423968689/manager/0.log"
Jan 04 13:06:53 crc kubenswrapper[4797]: I0104 13:06:53.483525 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" path="/var/lib/kubelet/pods/c2b17006-d911-4e3b-bee5-1b4abc20b1c1/volumes"
Jan 04 13:07:13 crc kubenswrapper[4797]: I0104 13:07:13.218428 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-vn8mq_ec4dbb28-5584-44b3-9b23-6e9f811f546d/control-plane-machine-set-operator/0.log"
Jan 04 13:07:13 crc kubenswrapper[4797]: I0104 13:07:13.359941 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mhvp5_0d05b9b3-d6d6-4fcc-9291-1ffac489c644/kube-rbac-proxy/0.log"
Jan 04 13:07:13 crc kubenswrapper[4797]: I0104 13:07:13.404246 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mhvp5_0d05b9b3-d6d6-4fcc-9291-1ffac489c644/machine-api-operator/0.log"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.837286 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"]
Jan 04 13:07:17 crc kubenswrapper[4797]: E0104 13:07:17.838523 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="extract-utilities"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.838545 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="extract-utilities"
Jan 04 13:07:17 crc kubenswrapper[4797]: E0104 13:07:17.838586 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="registry-server"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.838599 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="registry-server"
Jan 04 13:07:17 crc kubenswrapper[4797]: E0104 13:07:17.838625 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="extract-content"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.838637 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="extract-content"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.838920 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2b17006-d911-4e3b-bee5-1b4abc20b1c1" containerName="registry-server"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.840649 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:17 crc kubenswrapper[4797]: I0104 13:07:17.853738 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"]
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.042385 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.042569 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jz86\" (UniqueName: \"kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.042847 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.143414 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.143489 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.143539 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jz86\" (UniqueName: \"kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.144020 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.144130 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.166661 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jz86\" (UniqueName: \"kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86\") pod \"redhat-operators-f7bk5\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.457169 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:18 crc kubenswrapper[4797]: I0104 13:07:18.926649 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"]
Jan 04 13:07:19 crc kubenswrapper[4797]: I0104 13:07:19.079644 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerStarted","Data":"75ebe0a22153abcc57f5c4973c04f6f37ff91133485b3a87d36c4404c9df63b9"}
Jan 04 13:07:20 crc kubenswrapper[4797]: I0104 13:07:20.090485 4797 generic.go:334] "Generic (PLEG): container finished" podID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerID="dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf" exitCode=0
Jan 04 13:07:20 crc kubenswrapper[4797]: I0104 13:07:20.090615 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerDied","Data":"dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf"}
Jan 04 13:07:21 crc kubenswrapper[4797]: I0104 13:07:21.100546 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerStarted","Data":"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"}
Jan 04 13:07:22 crc kubenswrapper[4797]: I0104 13:07:22.111275 4797 generic.go:334] "Generic (PLEG): container finished" podID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerID="a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d" exitCode=0
Jan 04 13:07:22 crc kubenswrapper[4797]: I0104 13:07:22.111370 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerDied","Data":"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"}
Jan 04 13:07:23 crc kubenswrapper[4797]: I0104 13:07:23.123222 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerStarted","Data":"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"}
Jan 04 13:07:23 crc kubenswrapper[4797]: I0104 13:07:23.153936 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f7bk5" podStartSLOduration=3.51066602 podStartE2EDuration="6.153894127s" podCreationTimestamp="2026-01-04 13:07:17 +0000 UTC" firstStartedPulling="2026-01-04 13:07:20.093700302 +0000 UTC m=+4738.950887011" lastFinishedPulling="2026-01-04 13:07:22.736928379 +0000 UTC m=+4741.594115118" observedRunningTime="2026-01-04 13:07:23.149661144 +0000 UTC m=+4742.006847863" watchObservedRunningTime="2026-01-04 13:07:23.153894127 +0000 UTC m=+4742.011080846"
Jan 04 13:07:28 crc kubenswrapper[4797]: I0104 13:07:28.109047 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-bjstj_f3175674-5f60-4607-9755-5ee1295171c9/cert-manager-controller/0.log"
path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-bjstj_f3175674-5f60-4607-9755-5ee1295171c9/cert-manager-controller/0.log" Jan 04 13:07:28 crc kubenswrapper[4797]: I0104 13:07:28.288582 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-gml2q_2109ab50-1b14-45a7-a2a9-a415791eddb1/cert-manager-cainjector/0.log" Jan 04 13:07:28 crc kubenswrapper[4797]: I0104 13:07:28.422313 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-nxlgw_8da1d315-7862-496b-bb8d-0b8a56e7ebe8/cert-manager-webhook/0.log" Jan 04 13:07:28 crc kubenswrapper[4797]: I0104 13:07:28.457526 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f7bk5" Jan 04 13:07:28 crc kubenswrapper[4797]: I0104 13:07:28.457659 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f7bk5" Jan 04 13:07:29 crc kubenswrapper[4797]: I0104 13:07:29.496286 4797 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f7bk5" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server" probeResult="failure" output=< Jan 04 13:07:29 crc kubenswrapper[4797]: timeout: failed to connect service ":50051" within 1s Jan 04 13:07:29 crc kubenswrapper[4797]: > Jan 04 13:07:38 crc kubenswrapper[4797]: I0104 13:07:38.528502 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f7bk5" Jan 04 13:07:38 crc kubenswrapper[4797]: I0104 13:07:38.595011 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f7bk5" Jan 04 13:07:38 crc kubenswrapper[4797]: I0104 13:07:38.796731 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"] Jan 04 13:07:40 crc kubenswrapper[4797]: I0104 13:07:40.254822 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f7bk5" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server" containerID="cri-o://7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac" gracePeriod=2 Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.774324 4797 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f7bk5" Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.872366 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content\") pod \"8a683e45-499d-4a9b-a58a-91ddb2e71318\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.872452 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities\") pod \"8a683e45-499d-4a9b-a58a-91ddb2e71318\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.872480 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jz86\" (UniqueName: \"kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86\") pod \"8a683e45-499d-4a9b-a58a-91ddb2e71318\" (UID: \"8a683e45-499d-4a9b-a58a-91ddb2e71318\") " Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.873236 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities" (OuterVolumeSpecName: "utilities") pod "8a683e45-499d-4a9b-a58a-91ddb2e71318" (UID: "8a683e45-499d-4a9b-a58a-91ddb2e71318"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.892599 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86" (OuterVolumeSpecName: "kube-api-access-8jz86") pod "8a683e45-499d-4a9b-a58a-91ddb2e71318" (UID: "8a683e45-499d-4a9b-a58a-91ddb2e71318"). InnerVolumeSpecName "kube-api-access-8jz86". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.974302 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:07:41 crc kubenswrapper[4797]: I0104 13:07:41.974564 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jz86\" (UniqueName: \"kubernetes.io/projected/8a683e45-499d-4a9b-a58a-91ddb2e71318-kube-api-access-8jz86\") on node \"crc\" DevicePath \"\"" Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.003409 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a683e45-499d-4a9b-a58a-91ddb2e71318" (UID: "8a683e45-499d-4a9b-a58a-91ddb2e71318"). InnerVolumeSpecName "catalog-content". 
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.075880 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a683e45-499d-4a9b-a58a-91ddb2e71318-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.271091 4797 generic.go:334] "Generic (PLEG): container finished" podID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerID="7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac" exitCode=0
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.271150 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerDied","Data":"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"}
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.271182 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f7bk5" event={"ID":"8a683e45-499d-4a9b-a58a-91ddb2e71318","Type":"ContainerDied","Data":"75ebe0a22153abcc57f5c4973c04f6f37ff91133485b3a87d36c4404c9df63b9"}
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.271205 4797 scope.go:117] "RemoveContainer" containerID="7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.271200 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f7bk5"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.299552 4797 scope.go:117] "RemoveContainer" containerID="a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.302642 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"]
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.328957 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f7bk5"]
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.347271 4797 scope.go:117] "RemoveContainer" containerID="dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.374202 4797 scope.go:117] "RemoveContainer" containerID="7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"
Jan 04 13:07:42 crc kubenswrapper[4797]: E0104 13:07:42.374655 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac\": container with ID starting with 7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac not found: ID does not exist" containerID="7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.374738 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac"} err="failed to get container status \"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac\": rpc error: code = NotFound desc = could not find container \"7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac\": container with ID starting with 7b4989a51d442c0c993eaf61b8f2e561424ef2145029af2f575f718b717bfeac not found: ID does not exist"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.374819 4797 scope.go:117] "RemoveContainer" containerID="a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"
Jan 04 13:07:42 crc kubenswrapper[4797]: E0104 13:07:42.375250 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d\": container with ID starting with a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d not found: ID does not exist" containerID="a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.375325 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d"} err="failed to get container status \"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d\": rpc error: code = NotFound desc = could not find container \"a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d\": container with ID starting with a1b4f6a5a8f1b6edfdd9f8934202f9c342c481872ffcb25f57116401f33e937d not found: ID does not exist"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.375402 4797 scope.go:117] "RemoveContainer" containerID="dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf"
Jan 04 13:07:42 crc kubenswrapper[4797]: E0104 13:07:42.375757 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf\": container with ID starting with dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf not found: ID does not exist" containerID="dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.375833 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf"} err="failed to get container status \"dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf\": rpc error: code = NotFound desc = could not find container \"dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf\": container with ID starting with dec57dbf7804fa5108f8ef4ba5ec067525e11449c480fab4d3dd7e281704ebaf not found: ID does not exist"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.381107 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-52zcz_1b22c6d0-408c-4506-a7aa-b444f4798cb9/nmstate-console-plugin/0.log"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.505650 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-x8f7b_971f370a-a8c4-409f-bef8-d1c3e8bc048b/nmstate-handler/0.log"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.567005 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-5j8jd_2e39d5cd-01ae-48c3-9281-177a2b2591d9/nmstate-metrics/0.log"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.567591 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-5j8jd_2e39d5cd-01ae-48c3-9281-177a2b2591d9/kube-rbac-proxy/0.log"
Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.714054 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-2w96k_7f178f28-d4be-4076-b5b0-549a693eae61/nmstate-operator/0.log"
path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-2w96k_7f178f28-d4be-4076-b5b0-549a693eae61/nmstate-operator/0.log" Jan 04 13:07:42 crc kubenswrapper[4797]: I0104 13:07:42.735178 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-fvdtc_2a23c2a7-dc2b-49d3-ad4b-ac9ef1440a6d/nmstate-webhook/0.log" Jan 04 13:07:43 crc kubenswrapper[4797]: I0104 13:07:43.481418 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" path="/var/lib/kubelet/pods/8a683e45-499d-4a9b-a58a-91ddb2e71318/volumes" Jan 04 13:07:57 crc kubenswrapper[4797]: I0104 13:07:57.985621 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-r6lt7_600cb394-290a-4c70-bd38-2e32e170fa8b/kube-rbac-proxy/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.054914 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-r6lt7_600cb394-290a-4c70-bd38-2e32e170fa8b/controller/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.189659 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-frr-files/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.386740 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-metrics/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.389432 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-reloader/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.392386 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-frr-files/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.413467 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-reloader/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.539320 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-frr-files/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.554512 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-reloader/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.556185 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-metrics/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.591436 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-metrics/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.736265 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-frr-files/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.740123 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-reloader/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.759169 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/cp-metrics/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.782160 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/controller/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.933803 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/frr-metrics/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.956807 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/kube-rbac-proxy-frr/0.log" Jan 04 13:07:58 crc kubenswrapper[4797]: I0104 13:07:58.963122 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/kube-rbac-proxy/0.log" Jan 04 13:07:59 crc kubenswrapper[4797]: I0104 13:07:59.152398 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/reloader/0.log" Jan 04 13:07:59 crc kubenswrapper[4797]: I0104 13:07:59.179497 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-ncm97_48107c7b-5bf1-459a-bdbf-ea855afdad7e/frr-k8s-webhook-server/0.log" Jan 04 13:07:59 crc kubenswrapper[4797]: I0104 13:07:59.365330 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5448cb6b96-v2wrd_5ca974ff-1854-4e86-92e7-d5f1b0f66571/manager/0.log" Jan 04 13:07:59 crc kubenswrapper[4797]: I0104 13:07:59.532644 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-fb667c75f-f5h45_598502ca-9f0c-4b07-ac22-3d50604f562a/webhook-server/0.log" Jan 04 13:07:59 crc kubenswrapper[4797]: I0104 13:07:59.635869 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8vt7n_682a1858-fd65-41a5-aaf2-3def57491fec/kube-rbac-proxy/0.log" Jan 04 13:08:00 crc kubenswrapper[4797]: I0104 13:08:00.034765 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8vt7n_682a1858-fd65-41a5-aaf2-3def57491fec/speaker/0.log" Jan 04 13:08:00 crc kubenswrapper[4797]: I0104 13:08:00.140641 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkjmb_313f749d-6032-4016-a071-6e5e0fa06d87/frr/0.log" Jan 04 13:08:14 crc kubenswrapper[4797]: I0104 13:08:14.435869 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/util/0.log" Jan 04 13:08:14 crc kubenswrapper[4797]: I0104 13:08:14.784937 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/util/0.log" Jan 04 13:08:14 crc kubenswrapper[4797]: I0104 13:08:14.786004 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/pull/0.log" Jan 04 13:08:14 crc kubenswrapper[4797]: I0104 13:08:14.833388 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/pull/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.017821 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/util/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.025117 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/pull/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.026728 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a8qt8h_01093944-b8c9-40f4-a688-4fcae8488819/extract/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.580273 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/util/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.726362 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/pull/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.777031 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/util/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.801341 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/pull/0.log" Jan 04 13:08:15 crc kubenswrapper[4797]: I0104 13:08:15.971832 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/extract/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.000148 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/util/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.035779 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4fflqr_8fa557e6-a3f8-47cb-88a5-94840f015b14/pull/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.191132 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/util/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.312618 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/util/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.343043 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/pull/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.355449 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/pull/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.494928 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/util/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.515108 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/extract/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.538210 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82bc5w_81203228-c493-4cf1-9299-f5d46acba957/pull/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.655462 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-utilities/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.859210 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-content/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.870433 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-utilities/0.log" Jan 04 13:08:16 crc kubenswrapper[4797]: I0104 13:08:16.889679 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-content/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.079526 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-content/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.080405 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/extract-utilities/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.238897 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-utilities/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.447861 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-content/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.478062 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-utilities/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.485022 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-content/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.671392 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s6f2t_e1ff8919-8dda-4f12-84bf-78f0014b5ec5/registry-server/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.734647 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-content/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.744924 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/extract-utilities/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.893235 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-29fg5_8e644b6e-1006-4761-a4e9-b0af15833725/marketplace-operator/0.log" Jan 04 13:08:17 crc kubenswrapper[4797]: I0104 13:08:17.993501 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-utilities/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.117737 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-utilities/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.133586 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.170610 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.414739 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-utilities/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.471875 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.524340 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p48hh_7d9ff7e5-585d-49c0-a6cf-b2c1964fe3d1/registry-server/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.589670 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-utilities/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.601229 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-7pzcc_48f385bf-de22-4bcc-9f95-ad5fc822c631/registry-server/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.718492 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-utilities/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.756239 4797 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.765973 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.915437 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-content/0.log" Jan 04 13:08:18 crc kubenswrapper[4797]: I0104 13:08:18.921855 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/extract-utilities/0.log" Jan 04 13:08:19 crc kubenswrapper[4797]: I0104 13:08:19.499422 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zt2ck_8cd0fba6-5837-4843-a86c-9443222d1961/registry-server/0.log" Jan 04 13:08:49 crc kubenswrapper[4797]: I0104 13:08:49.493438 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:08:49 crc kubenswrapper[4797]: I0104 13:08:49.494207 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:09:19 crc kubenswrapper[4797]: I0104 13:09:19.493399 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:09:19 crc kubenswrapper[4797]: I0104 13:09:19.493954 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:09:26 crc kubenswrapper[4797]: I0104 13:09:26.148397 4797 generic.go:334] "Generic (PLEG): container finished" podID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerID="843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb" exitCode=0 Jan 04 13:09:26 crc kubenswrapper[4797]: I0104 13:09:26.149059 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s567t/must-gather-t9fl9" event={"ID":"d009ae5a-2d1d-4458-b654-bf30d1a32bc1","Type":"ContainerDied","Data":"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"} Jan 04 13:09:26 crc kubenswrapper[4797]: I0104 13:09:26.149747 4797 scope.go:117] "RemoveContainer" containerID="843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb" Jan 04 13:09:26 crc kubenswrapper[4797]: I0104 13:09:26.639206 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s567t_must-gather-t9fl9_d009ae5a-2d1d-4458-b654-bf30d1a32bc1/gather/0.log" Jan 04 13:09:33 crc 
Jan 04 13:09:33 crc kubenswrapper[4797]: I0104 13:09:33.437711 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-s567t/must-gather-t9fl9" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy" containerID="cri-o://d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d" gracePeriod=2
Jan 04 13:09:33 crc kubenswrapper[4797]: I0104 13:09:33.447955 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s567t/must-gather-t9fl9"]
Jan 04 13:09:33 crc kubenswrapper[4797]: I0104 13:09:33.902548 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s567t_must-gather-t9fl9_d009ae5a-2d1d-4458-b654-bf30d1a32bc1/copy/0.log"
Jan 04 13:09:33 crc kubenswrapper[4797]: I0104 13:09:33.903180 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s567t/must-gather-t9fl9"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.065355 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output\") pod \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") "
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.065504 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sns5g\" (UniqueName: \"kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g\") pod \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\" (UID: \"d009ae5a-2d1d-4458-b654-bf30d1a32bc1\") "
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.133922 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g" (OuterVolumeSpecName: "kube-api-access-sns5g") pod "d009ae5a-2d1d-4458-b654-bf30d1a32bc1" (UID: "d009ae5a-2d1d-4458-b654-bf30d1a32bc1"). InnerVolumeSpecName "kube-api-access-sns5g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.150578 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d009ae5a-2d1d-4458-b654-bf30d1a32bc1" (UID: "d009ae5a-2d1d-4458-b654-bf30d1a32bc1"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.166764 4797 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-must-gather-output\") on node \"crc\" DevicePath \"\""
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.166811 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sns5g\" (UniqueName: \"kubernetes.io/projected/d009ae5a-2d1d-4458-b654-bf30d1a32bc1-kube-api-access-sns5g\") on node \"crc\" DevicePath \"\""
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.216624 4797 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s567t_must-gather-t9fl9_d009ae5a-2d1d-4458-b654-bf30d1a32bc1/copy/0.log"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.217068 4797 generic.go:334] "Generic (PLEG): container finished" podID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerID="d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d" exitCode=143
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.217134 4797 scope.go:117] "RemoveContainer" containerID="d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.217188 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s567t/must-gather-t9fl9"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.237408 4797 scope.go:117] "RemoveContainer" containerID="843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.294032 4797 scope.go:117] "RemoveContainer" containerID="d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d"
Jan 04 13:09:34 crc kubenswrapper[4797]: E0104 13:09:34.294578 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d\": container with ID starting with d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d not found: ID does not exist" containerID="d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.294641 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d"} err="failed to get container status \"d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d\": rpc error: code = NotFound desc = could not find container \"d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d\": container with ID starting with d569014b48755ac87d41b0fb6ed7c41dba0c2a3087a18e961e10fb5e0909281d not found: ID does not exist"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.294673 4797 scope.go:117] "RemoveContainer" containerID="843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"
Jan 04 13:09:34 crc kubenswrapper[4797]: E0104 13:09:34.295133 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb\": container with ID starting with 843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb not found: ID does not exist" containerID="843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"
Jan 04 13:09:34 crc kubenswrapper[4797]: I0104 13:09:34.295171 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb"} err="failed to get container status \"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb\": rpc error: code = NotFound desc = could not find container \"843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb\": container with ID starting with 843cc8ee58b0683ba4cab3a93e9e9119f5d928f7d7eef5d7c3543097a121dafb not found: ID does not exist"
Jan 04 13:09:35 crc kubenswrapper[4797]: I0104 13:09:35.490609 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" path="/var/lib/kubelet/pods/d009ae5a-2d1d-4458-b654-bf30d1a32bc1/volumes"
Jan 04 13:09:49 crc kubenswrapper[4797]: I0104 13:09:49.492663 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 13:09:49 crc kubenswrapper[4797]: I0104 13:09:49.493497 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 13:09:49 crc kubenswrapper[4797]: I0104 13:09:49.493567 4797 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6"
Jan 04 13:09:49 crc kubenswrapper[4797]: I0104 13:09:49.494422 4797 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7"} pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 13:09:49 crc kubenswrapper[4797]: I0104 13:09:49.494514 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" containerID="cri-o://cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7" gracePeriod=600
Jan 04 13:09:50 crc kubenswrapper[4797]: E0104 13:09:50.032375 4797 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f279bbd_812a_4617_b821_852c35954cb6.slice/crio-conmon-cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7.scope\": RecentStats: unable to find data in memory cache]"
Jan 04 13:09:50 crc kubenswrapper[4797]: I0104 13:09:50.346613 4797 generic.go:334] "Generic (PLEG): container finished" podID="9f279bbd-812a-4617-b821-852c35954cb6" containerID="cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7" exitCode=0
Jan 04 13:09:50 crc kubenswrapper[4797]: I0104 13:09:50.346673 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerDied","Data":"cb3969ff4c6f3b465799a96a51804cfef6a7e0bd29dc91567fe218e1688a53f7"}
Jan 04 13:09:50 crc kubenswrapper[4797]: I0104 13:09:50.346715 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" event={"ID":"9f279bbd-812a-4617-b821-852c35954cb6","Type":"ContainerStarted","Data":"8491361010d75002ccf15868c3cb070621c280c1200127e15f647e5c217f53e1"}
Jan 04 13:09:50 crc kubenswrapper[4797]: I0104 13:09:50.346731 4797 scope.go:117] "RemoveContainer" containerID="b055439d3f9b22b24bdb4069836b0973dd094c81c4795f4f9ba788fca28d4a6a"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.666162 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667415 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667446 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667479 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-content"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667495 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-content"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667518 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667531 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667553 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667565 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667608 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-utilities"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667621 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-utilities"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667884 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667910 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667941 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.670244 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.666162 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667415 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667446 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667479 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-content"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667495 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-content"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667518 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667531 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667553 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667565 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: E0104 13:11:51.667608 4797 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-utilities"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667621 4797 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="extract-utilities"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667884 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="copy"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667910 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="d009ae5a-2d1d-4458-b654-bf30d1a32bc1" containerName="gather"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.667941 4797 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a683e45-499d-4a9b-a58a-91ddb2e71318" containerName="registry-server"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.670244 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.674649 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.764166 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.764314 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.764351 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w85w\" (UniqueName: \"kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.865751 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.865970 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.866037 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w85w\" (UniqueName: \"kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.867508 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.869203 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:51 crc kubenswrapper[4797]: I0104 13:11:51.899625 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w85w\" (UniqueName: \"kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w\") pod \"community-operators-7nkq5\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:52 crc kubenswrapper[4797]: I0104 13:11:52.010697 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nkq5"
Jan 04 13:11:52 crc kubenswrapper[4797]: I0104 13:11:52.540142 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.433195 4797 generic.go:334] "Generic (PLEG): container finished" podID="47edb7a4-3302-411e-ad25-4aa2b1666f1a" containerID="b0c035e4775d2f965e27388cd0ce8f0c523146ffbb09b27372d4191d331f0ee8" exitCode=0
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.433239 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerDied","Data":"b0c035e4775d2f965e27388cd0ce8f0c523146ffbb09b27372d4191d331f0ee8"}
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.433264 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerStarted","Data":"300cc9c072e89c4c36fcb3ebda4ee1ba1f0921f2b9076a34a50d79266913a2d8"}
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.435539 4797 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.458224 4797 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5px2r"]
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.462811 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.471341 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5px2r"]
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.493587 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.493749 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqlmq\" (UniqueName: \"kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.493815 4797 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.595532 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqlmq\" (UniqueName: \"kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.595626 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.595701 4797 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.596715 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.597504 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.623223 4797 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqlmq\" (UniqueName: \"kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq\") pod \"certified-operators-5px2r\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") " pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:53 crc kubenswrapper[4797]: I0104 13:11:53.798276 4797 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:11:54 crc kubenswrapper[4797]: I0104 13:11:54.249490 4797 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5px2r"]
Jan 04 13:11:54 crc kubenswrapper[4797]: W0104 13:11:54.261406 4797 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64cb1207_7d76_4839_937e_67a0e9354c49.slice/crio-e1e606c52763802ade50dbc4d01726601d4b000d028136857c00544f2a0ed7af WatchSource:0}: Error finding container e1e606c52763802ade50dbc4d01726601d4b000d028136857c00544f2a0ed7af: Status 404 returned error can't find the container with id e1e606c52763802ade50dbc4d01726601d4b000d028136857c00544f2a0ed7af
Jan 04 13:11:54 crc kubenswrapper[4797]: I0104 13:11:54.447047 4797 generic.go:334] "Generic (PLEG): container finished" podID="64cb1207-7d76-4839-937e-67a0e9354c49" containerID="c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe" exitCode=0
Jan 04 13:11:54 crc kubenswrapper[4797]: I0104 13:11:54.447118 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerDied","Data":"c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe"}
Jan 04 13:11:54 crc kubenswrapper[4797]: I0104 13:11:54.447148 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerStarted","Data":"e1e606c52763802ade50dbc4d01726601d4b000d028136857c00544f2a0ed7af"}
Jan 04 13:11:54 crc kubenswrapper[4797]: I0104 13:11:54.448941 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerStarted","Data":"c2e5816c4aec4696d1d600dde70612852ee2ab0290b72e61a12257b2303b8459"}
Jan 04 13:11:55 crc kubenswrapper[4797]: I0104 13:11:55.480717 4797 generic.go:334] "Generic (PLEG): container finished" podID="47edb7a4-3302-411e-ad25-4aa2b1666f1a" containerID="c2e5816c4aec4696d1d600dde70612852ee2ab0290b72e61a12257b2303b8459" exitCode=0
Jan 04 13:11:55 crc kubenswrapper[4797]: I0104 13:11:55.494822 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerDied","Data":"c2e5816c4aec4696d1d600dde70612852ee2ab0290b72e61a12257b2303b8459"}
Jan 04 13:11:55 crc kubenswrapper[4797]: I0104 13:11:55.494861 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerStarted","Data":"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"}
Jan 04 13:11:56 crc kubenswrapper[4797]: I0104 13:11:56.493159 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerStarted","Data":"28d27160cffab7e2d421d028330631e7e5ecdbf410f36f5afe951b498b54cadf"}
Jan 04 13:11:56 crc kubenswrapper[4797]: I0104 13:11:56.497414 4797 generic.go:334] "Generic (PLEG): container finished" podID="64cb1207-7d76-4839-937e-67a0e9354c49" containerID="df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b" exitCode=0
Jan 04 13:11:56 crc kubenswrapper[4797]: I0104 13:11:56.497546 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerDied","Data":"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"}
Jan 04 13:11:56 crc kubenswrapper[4797]: I0104 13:11:56.519245 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7nkq5" podStartSLOduration=3.062905381 podStartE2EDuration="5.519142127s" podCreationTimestamp="2026-01-04 13:11:51 +0000 UTC" firstStartedPulling="2026-01-04 13:11:53.434739867 +0000 UTC m=+5012.291926576" lastFinishedPulling="2026-01-04 13:11:55.890976613 +0000 UTC m=+5014.748163322" observedRunningTime="2026-01-04 13:11:56.512328516 +0000 UTC m=+5015.369515235" watchObservedRunningTime="2026-01-04 13:11:56.519142127 +0000 UTC m=+5015.376328836"
Jan 04 13:11:57 crc kubenswrapper[4797]: I0104 13:11:57.506296 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerStarted","Data":"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"}
Jan 04 13:11:57 crc kubenswrapper[4797]: I0104 13:11:57.526437 4797 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5px2r" podStartSLOduration=2.079668278 podStartE2EDuration="4.526417321s" podCreationTimestamp="2026-01-04 13:11:53 +0000 UTC" firstStartedPulling="2026-01-04 13:11:54.448119555 +0000 UTC m=+5013.305306264" lastFinishedPulling="2026-01-04 13:11:56.894868608 +0000 UTC m=+5015.752055307" observedRunningTime="2026-01-04 13:11:57.525413595 +0000 UTC m=+5016.382600294" watchObservedRunningTime="2026-01-04 13:11:57.526417321 +0000 UTC m=+5016.383604030"
probe="readiness" status="" pod="openshift-marketplace/certified-operators-5px2r" Jan 04 13:12:03 crc kubenswrapper[4797]: I0104 13:12:03.871137 4797 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5px2r" Jan 04 13:12:04 crc kubenswrapper[4797]: I0104 13:12:04.557296 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7nkq5" podUID="47edb7a4-3302-411e-ad25-4aa2b1666f1a" containerName="registry-server" containerID="cri-o://28d27160cffab7e2d421d028330631e7e5ecdbf410f36f5afe951b498b54cadf" gracePeriod=2 Jan 04 13:12:04 crc kubenswrapper[4797]: I0104 13:12:04.646503 4797 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5px2r" Jan 04 13:12:05 crc kubenswrapper[4797]: I0104 13:12:05.118713 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5px2r"] Jan 04 13:12:05 crc kubenswrapper[4797]: I0104 13:12:05.568307 4797 generic.go:334] "Generic (PLEG): container finished" podID="47edb7a4-3302-411e-ad25-4aa2b1666f1a" containerID="28d27160cffab7e2d421d028330631e7e5ecdbf410f36f5afe951b498b54cadf" exitCode=0 Jan 04 13:12:05 crc kubenswrapper[4797]: I0104 13:12:05.568392 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerDied","Data":"28d27160cffab7e2d421d028330631e7e5ecdbf410f36f5afe951b498b54cadf"} Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.184254 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nkq5" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.303932 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w85w\" (UniqueName: \"kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w\") pod \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.304036 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities\") pod \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.304127 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content\") pod \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\" (UID: \"47edb7a4-3302-411e-ad25-4aa2b1666f1a\") " Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.305118 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities" (OuterVolumeSpecName: "utilities") pod "47edb7a4-3302-411e-ad25-4aa2b1666f1a" (UID: "47edb7a4-3302-411e-ad25-4aa2b1666f1a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.319289 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w" (OuterVolumeSpecName: "kube-api-access-7w85w") pod "47edb7a4-3302-411e-ad25-4aa2b1666f1a" (UID: "47edb7a4-3302-411e-ad25-4aa2b1666f1a"). InnerVolumeSpecName "kube-api-access-7w85w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.391450 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47edb7a4-3302-411e-ad25-4aa2b1666f1a" (UID: "47edb7a4-3302-411e-ad25-4aa2b1666f1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.406034 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w85w\" (UniqueName: \"kubernetes.io/projected/47edb7a4-3302-411e-ad25-4aa2b1666f1a-kube-api-access-7w85w\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.406076 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.406089 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47edb7a4-3302-411e-ad25-4aa2b1666f1a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.579336 4797 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5px2r" podUID="64cb1207-7d76-4839-937e-67a0e9354c49" containerName="registry-server" containerID="cri-o://ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f" gracePeriod=2 Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.579433 4797 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.583135 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nkq5" event={"ID":"47edb7a4-3302-411e-ad25-4aa2b1666f1a","Type":"ContainerDied","Data":"300cc9c072e89c4c36fcb3ebda4ee1ba1f0921f2b9076a34a50d79266913a2d8"}
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.583226 4797 scope.go:117] "RemoveContainer" containerID="28d27160cffab7e2d421d028330631e7e5ecdbf410f36f5afe951b498b54cadf"
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.612590 4797 scope.go:117] "RemoveContainer" containerID="c2e5816c4aec4696d1d600dde70612852ee2ab0290b72e61a12257b2303b8459"
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.620280 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.633964 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7nkq5"]
Jan 04 13:12:06 crc kubenswrapper[4797]: I0104 13:12:06.646866 4797 scope.go:117] "RemoveContainer" containerID="b0c035e4775d2f965e27388cd0ce8f0c523146ffbb09b27372d4191d331f0ee8"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.487257 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47edb7a4-3302-411e-ad25-4aa2b1666f1a" path="/var/lib/kubelet/pods/47edb7a4-3302-411e-ad25-4aa2b1666f1a/volumes"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.500423 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.521058 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content\") pod \"64cb1207-7d76-4839-937e-67a0e9354c49\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") "
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.521107 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqlmq\" (UniqueName: \"kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq\") pod \"64cb1207-7d76-4839-937e-67a0e9354c49\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") "
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.521162 4797 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities\") pod \"64cb1207-7d76-4839-937e-67a0e9354c49\" (UID: \"64cb1207-7d76-4839-937e-67a0e9354c49\") "
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.530157 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq" (OuterVolumeSpecName: "kube-api-access-dqlmq") pod "64cb1207-7d76-4839-937e-67a0e9354c49" (UID: "64cb1207-7d76-4839-937e-67a0e9354c49"). InnerVolumeSpecName "kube-api-access-dqlmq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.543132 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities" (OuterVolumeSpecName: "utilities") pod "64cb1207-7d76-4839-937e-67a0e9354c49" (UID: "64cb1207-7d76-4839-937e-67a0e9354c49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.602182 4797 generic.go:334] "Generic (PLEG): container finished" podID="64cb1207-7d76-4839-937e-67a0e9354c49" containerID="ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f" exitCode=0
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.602277 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerDied","Data":"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"}
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.602325 4797 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5px2r" event={"ID":"64cb1207-7d76-4839-937e-67a0e9354c49","Type":"ContainerDied","Data":"e1e606c52763802ade50dbc4d01726601d4b000d028136857c00544f2a0ed7af"}
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.602343 4797 scope.go:117] "RemoveContainer" containerID="ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.602474 4797 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5px2r"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.605500 4797 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64cb1207-7d76-4839-937e-67a0e9354c49" (UID: "64cb1207-7d76-4839-937e-67a0e9354c49"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.622904 4797 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.622963 4797 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqlmq\" (UniqueName: \"kubernetes.io/projected/64cb1207-7d76-4839-937e-67a0e9354c49-kube-api-access-dqlmq\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.622981 4797 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64cb1207-7d76-4839-937e-67a0e9354c49-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.638107 4797 scope.go:117] "RemoveContainer" containerID="df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.655396 4797 scope.go:117] "RemoveContainer" containerID="c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.699098 4797 scope.go:117] "RemoveContainer" containerID="ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"
Jan 04 13:12:07 crc kubenswrapper[4797]: E0104 13:12:07.699511 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f\": container with ID starting with ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f not found: ID does not exist" containerID="ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.699538 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f"} err="failed to get container status \"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f\": rpc error: code = NotFound desc = could not find container \"ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f\": container with ID starting with ec7a7705d7d3eb4f0d03756fcfe4b5e61387bce59da2e3ebe5b545d891e4fa5f not found: ID does not exist"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.699584 4797 scope.go:117] "RemoveContainer" containerID="df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"
Jan 04 13:12:07 crc kubenswrapper[4797]: E0104 13:12:07.699873 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b\": container with ID starting with df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b not found: ID does not exist" containerID="df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.699923 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b"} err="failed to get container status \"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b\": rpc error: code = NotFound desc = could not find container \"df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b\": container with ID starting with df93c7c6cc05b1e7223aca01ee34451318a20af861dc1a07069757e75e62ce5b not found: ID does not exist"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.699936 4797 scope.go:117] "RemoveContainer" containerID="c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe"
Jan 04 13:12:07 crc kubenswrapper[4797]: E0104 13:12:07.700171 4797 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe\": container with ID starting with c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe not found: ID does not exist" containerID="c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.700189 4797 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe"} err="failed to get container status \"c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe\": rpc error: code = NotFound desc = could not find container \"c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe\": container with ID starting with c824c9d3f05583c7954dfe30ad5f137dbab49d8c7a1f915bbc6bc24a37f4babe not found: ID does not exist"
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.956346 4797 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5px2r"]
Jan 04 13:12:07 crc kubenswrapper[4797]: I0104 13:12:07.968813 4797 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5px2r"]
Jan 04 13:12:09 crc kubenswrapper[4797]: I0104 13:12:09.490409 4797 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64cb1207-7d76-4839-937e-67a0e9354c49" path="/var/lib/kubelet/pods/64cb1207-7d76-4839-937e-67a0e9354c49/volumes"
Jan 04 13:12:19 crc kubenswrapper[4797]: I0104 13:12:19.492523 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 13:12:19 crc kubenswrapper[4797]: I0104 13:12:19.493074 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 13:12:49 crc kubenswrapper[4797]: I0104 13:12:49.492795 4797 patch_prober.go:28] interesting pod/machine-config-daemon-2dbq6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 13:12:49 crc kubenswrapper[4797]: I0104 13:12:49.493439 4797 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2dbq6" podUID="9f279bbd-812a-4617-b821-852c35954cb6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"